# Copyright (c) 2014, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
try:
from unittest import mock
except ImportError:
import mock
import unittest
import uuid
import xml.etree.ElementTree as ET
import pan.xapi
import pandevice.base as Base
import pandevice.errors as Err
OBJECT_NAME = 'MyObjectName'
VSYS = 'vsys1'
class TestPanObject(unittest.TestCase):
def setUp(self):
self.obj = Base.PanObject(OBJECT_NAME)
def verify_object(self, obj, **kwargs):
self.assertEqual(
kwargs.get('name', None),
obj.name)
self.assertEqual(
kwargs.get('children', []),
obj.children)
self.assertEqual(
kwargs.get('parent', None),
obj.parent)
def test_create_with_name(self):
self.obj = Base.PanObject(OBJECT_NAME)
self.verify_object(self.obj, name=OBJECT_NAME)
def test_create_without_name(self):
self.obj = Base.PanObject()
self.verify_object(self.obj)
def test_str_of_object_with_name(self):
self.assertEqual(OBJECT_NAME, str(self.obj))
def test_str_of_object_without_name(self):
self.obj = Base.PanObject()
self.assertEqual('None', str(self.obj))
def test_has_callable_variables(self):
self.assertTrue(callable(self.obj.variables))
def test_property_vsys_without_parent(self):
self.assertIsNone(self.obj.vsys)
def test_property_vsys_with_parent(self):
self.obj.parent = mock.Mock(vsys=VSYS)
self.assertEqual(VSYS, self.obj.vsys)
def test_property_vsys_raises_error(self):
self.assertRaises(
Err.PanDeviceError,
setattr,
self.obj, 'vsys', 'foo')
def test_property_uid(self):
expected = OBJECT_NAME
ret_val = self.obj.uid
self.assertEqual(expected, ret_val)
def test_add_without_children(self):
CHILD_NAME = 'child'
child = Base.PanObject(CHILD_NAME)
ret_value = self.obj.add(child)
self.assertEqual(child, ret_value)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=[child, ])
self.verify_object(
child,
name=CHILD_NAME,
parent=self.obj)
def test_add_with_children(self):
CHILD1_NAME = 'FirstChild'
child1 = Base.PanObject(CHILD1_NAME)
child1.parent = self.obj
self.obj.children = [child1, ]
CHILD2_NAME = 'SecondChild'
child2 = Base.PanObject(CHILD2_NAME)
ret_val = self.obj.add(child2)
self.assertEqual(child2, ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=[child1, child2])
self.verify_object(
child1,
name=CHILD1_NAME,
parent=self.obj)
self.verify_object(
child2,
name=CHILD2_NAME,
parent=self.obj)
def test_insert_without_children(self):
CHILD_NAME = 'Child'
child = Base.PanObject(CHILD_NAME)
ret_val = self.obj.insert(0, child)
self.assertEqual(child, ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=[child, ])
self.verify_object(
child,
name=CHILD_NAME,
parent=self.obj)
def test_insert_with_children(self):
CHILD1_NAME = 'FirstChild'
child1 = Base.PanObject(CHILD1_NAME)
child1.parent = self.obj
CHILD3_NAME = 'ThirdChild'
child3 = Base.PanObject(CHILD3_NAME)
child3.parent = self.obj
self.obj.children = [child1, child3]
CHILD2_NAME = 'SecondChild'
child2 = Base.PanObject(CHILD2_NAME)
ret_val = self.obj.insert(1, child2)
self.assertEqual(child2, ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=[child1, child2, child3])
self.verify_object(
child1,
name=CHILD1_NAME,
parent=self.obj)
self.verify_object(
child2,
name=CHILD2_NAME,
parent=self.obj)
self.verify_object(
child3,
name=CHILD3_NAME,
parent=self.obj)
def test_extend_without_children(self):
CHILD1_NAME = 'FirstChild'
child1 = Base.PanObject(CHILD1_NAME)
CHILD2_NAME = 'SecondChild'
child2 = Base.PanObject(CHILD2_NAME)
children = [child1, child2]
ret_val = self.obj.extend(children)
self.assertIsNone(ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=children)
self.verify_object(
child1,
name=CHILD1_NAME,
parent=self.obj)
self.verify_object(
child2,
name=CHILD2_NAME,
parent=self.obj)
def test_extend_with_children(self):
CHILD1_NAME = 'FirstChild'
child1 = Base.PanObject(CHILD1_NAME)
child1.parent = self.obj
self.obj.children = [child1, ]
CHILD2_NAME = 'SecondChild'
child2 = Base.PanObject(CHILD2_NAME)
CHILD3_NAME = 'ThirdChild'
child3 = Base.PanObject(CHILD3_NAME)
new_children = [child2, child3]
all_children = [child1, child2, child3]
ret_val = self.obj.extend(new_children)
self.assertIsNone(ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=all_children)
self.verify_object(
child1,
name=CHILD1_NAME,
parent=self.obj)
self.verify_object(
child2,
name=CHILD2_NAME,
parent=self.obj)
self.verify_object(
child3,
name=CHILD3_NAME,
parent=self.obj)
def test_pop(self):
CHILD_NAME = 'Child'
child = Base.PanObject(CHILD_NAME)
child.parent = self.obj
self.obj.children = [child, ]
ret_val = self.obj.pop(0)
self.assertEqual(child, ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME)
self.verify_object(
child,
name=CHILD_NAME)
def test_pop_raises_error(self):
'''An invalid index should raise IndexError.'''
self.assertRaises(
IndexError,
self.obj.pop, 0)
def test_remove(self):
CHILD1_NAME = 'Child1'
child1 = Base.PanObject(CHILD1_NAME)
child1.parent = self.obj
CHILD2_NAME = 'Child2'
child2 = Base.PanObject(CHILD2_NAME)
child2.parent = self.obj
self.obj.children = [child1, child2]
ret_val = self.obj.remove(child2)
self.assertIsNone(ret_val)
self.verify_object(
self.obj,
name=OBJECT_NAME,
children=[child1, ])
self.verify_object(
child1,
name=CHILD1_NAME,
parent=self.obj)
self.verify_object(
child2,
name=CHILD2_NAME)
def test_remove_raises_error(self):
'''An invalid child should raise ValueError.'''
CHILD1_NAME = 'Child1'
child1 = Base.PanObject(CHILD1_NAME)
child1.parent = self.obj
CHILD2_NAME = 'Child2'
child2 = Base.PanObject(CHILD2_NAME)
self.obj.children = [child1, ]
self.assertRaises(
ValueError,
self.obj.remove, child2)
def test_remove_by_name_when_find_returns_index(self):
CHILD_NAME = 'MyChild'
self.obj.children = [1, 2, 3]
INDEX_VALUE = 4
self.obj.find_index = mock.Mock(return_value=INDEX_VALUE)
POP_RETURN_VALUE = 'foo'
self.obj.pop = mock.Mock(return_value=POP_RETURN_VALUE)
ret_val = self.obj.remove_by_name(CHILD_NAME, None)
self.assertEqual(POP_RETURN_VALUE, ret_val)
self.obj.find_index.assert_called_once_with(CHILD_NAME, None)
self.obj.pop.assert_called_once_with(INDEX_VALUE)
def test_remove_by_name_when_find_returns_none(self):
CHILD_NAME = 'foo'
self.obj.children = ['a', 'b', 'c']
self.obj.find_index = mock.Mock(return_value=None)
ret_val = self.obj.remove_by_name(CHILD_NAME, None)
self.assertIsNone(ret_val)
self.obj.find_index.assert_called_once_with(CHILD_NAME, None)
# Skipping removeall
# Skipping xpath_nosuffix
# Skipping xpath_short
def test_xpath_vsys_without_parent(self):
ret_val = self.obj.xpath_vsys()
self.assertIsNone(ret_val)
def test_xpath_vsys_with_parent(self):
expected_value = 'foo'
spec = {
'xpath_vsys.return_value': expected_value,
}
self.obj.parent = mock.Mock(**spec)
ret_val = self.obj.xpath_vsys()
self.assertEqual(expected_value, ret_val)
self.obj.parent.xpath_vsys.assert_called_once_with()
def test_xpath_panorama_without_parent(self):
ret_val = self.obj.xpath_panorama()
self.assertIsNone(ret_val)
def test_xpath_panorama_with_parent(self):
expected_value = 'foo'
spec = {
'xpath_panorama.return_value': expected_value,
}
self.obj.parent = mock.Mock(**spec)
ret_val = self.obj.xpath_panorama()
self.assertEqual(expected_value, ret_val)
self.obj.parent.xpath_panorama.assert_called_once_with()
# Skip element()
@mock.patch('pandevice.base.ET')
def test_element_str(self, m_ET):
Element_Value = 42
self.obj.element = mock.Mock(return_value=Element_Value)
Tostring_Value = '42'
spec = {
'tostring.return_value': Tostring_Value,
}
m_ET.configure_mock(**spec)
ret_val = self.obj.element_str()
self.assertEqual(Tostring_Value, ret_val)
self.obj.element.assert_called_once_with()
m_ET.tostring.assert_called_once_with(Element_Value, encoding='utf-8')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
@mock.patch('pandevice.base.ET')
def test_root_element_with_entry_suffix(self, m_ET, m_uid):
self.obj.SUFFIX = Base.ENTRY
Uid = 'uid'
expected = 'Value'
spec = {
'Element.return_value': expected,
}
m_ET.configure_mock(**spec)
m_uid.return_value = Uid
ret_val = self.obj._root_element()
self.assertEqual(expected, ret_val)
m_ET.Element.assert_called_once_with('entry', {'name': Uid})
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
@mock.patch('pandevice.base.ET')
def test_root_element_with_member_suffix(self, m_ET, m_uid):
self.obj.SUFFIX = Base.MEMBER
Uid = 'uid'
expected = mock.Mock(text=Uid)
spec = {
'Element.return_value': mock.Mock(),
}
m_ET.configure_mock(**spec)
m_uid.return_value = Uid
ret_val = self.obj._root_element()
self.assertEqual(Uid, ret_val.text)
m_ET.Element.assert_called_once_with('member')
@mock.patch('pandevice.base.ET')
def test_root_element_with_none_suffix_no_slashes(self, m_ET):
self.obj.SUFFIX = None
expected_tag = 'baz'
full_path = expected_tag
self.obj.XPATH = full_path
expected_value = '42'
spec = {
'Element.return_value': expected_value,
}
m_ET.configure_mock(**spec)
ret_val = self.obj._root_element()
self.assertEqual(expected_value, ret_val)
m_ET.Element.assert_called_once_with(expected_tag)
@mock.patch('pandevice.base.ET')
def test_root_element_with_none_suffix_multiple_slashes(self, m_ET):
self.obj.SUFFIX = None
expected_tag = 'baz'
full_path = '/foo/bar/baz'
self.obj.XPATH = full_path
expected_value = '42'
spec = {
'Element.return_value': expected_value,
}
m_ET.configure_mock(**spec)
ret_val = self.obj._root_element()
self.assertEqual(expected_value, ret_val)
m_ET.Element.assert_called_once_with(expected_tag)
# Skip _subelements
def test_check_child_methods_for_name_not_in_childmethods(self):
spec = {
'_check_child_methods.return_value': None,
}
for x in range(3):
m = mock.Mock(**spec)
self.obj.children.append(m)
Method = str(uuid.uuid4()).replace('-', '_')
ret_val = self.obj._check_child_methods(Method)
self.assertIsNone(ret_val)
for c in self.obj.children:
c._check_child_methods.assert_called_once_with(Method)
def test_check_child_methods_for_name_in_childmethods(self):
spec = {
'_check_child_methods.return_value': None,
}
for x in range(3):
m = mock.Mock(**spec)
self.obj.children.append(m)
Method = str(uuid.uuid4()).replace('-', '_')
self.obj.CHILDMETHODS += (Method, )
setattr(self.obj, 'child_{0}'.format(Method), mock.Mock())
ret_val = self.obj._check_child_methods(Method)
self.assertIsNone(ret_val)
m = getattr(self.obj, 'child_{0}'.format(Method))
m.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with(Method)
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_apply_with_ha_sync(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
PanDeviceElementStr = 'element string'
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
m_uid.return_value = 'uid'
for x in range(3):
child = mock.Mock(**spec)
self.obj.children.append(child)
ret_val = self.obj.apply()
self.assertIsNone(ret_val)
m_pandevice.set_config_changed.assert_called_once_with()
m_pandevice.active().xapi.edit.assert_called_once_with(
PanDeviceXpath,
PanDeviceElementStr,
retry_on_peer=self.obj.HA_SYNC,
)
self.obj.xpath.assert_called_once_with()
self.obj.element_str.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with('apply')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_apply_without_ha_sync(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
PanDeviceElementStr = 'element string'
self.obj.HA_SYNC = False
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
m_uid.return_value = 'uid'
for x in range(3):
child = mock.Mock(**spec)
self.obj.children.append(child)
ret_val = self.obj.apply()
self.assertIsNone(ret_val)
m_pandevice.set_config_changed.assert_called_once_with()
m_pandevice.xapi.edit.assert_called_once_with(
PanDeviceXpath,
PanDeviceElementStr,
retry_on_peer=self.obj.HA_SYNC,
)
self.obj.xpath.assert_called_once_with()
self.obj.element_str.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with('apply')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_create_with_ha_sync(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
PanDeviceElementStr = 'element string'
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath_short = mock.Mock(return_value=PanDeviceXpath)
self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
m_uid.return_value = 'uid'
for x in range(3):
child = mock.Mock(**spec)
self.obj.children.append(child)
ret_val = self.obj.create()
self.assertIsNone(ret_val)
m_pandevice.set_config_changed.assert_called_once_with()
m_pandevice.active().xapi.set.assert_called_once_with(
PanDeviceXpath,
PanDeviceElementStr,
retry_on_peer=self.obj.HA_SYNC,
)
self.obj.xpath_short.assert_called_once_with()
self.obj.element_str.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with('create')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_create_without_ha_sync(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
PanDeviceElementStr = 'element string'
self.obj.HA_SYNC = False
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath_short = mock.Mock(return_value=PanDeviceXpath)
self.obj.element_str = mock.Mock(return_value=PanDeviceElementStr)
m_uid.return_value = 'uid'
for x in range(3):
child = mock.Mock()
self.obj.children.append(child)
ret_val = self.obj.create()
self.assertIsNone(ret_val)
m_pandevice.set_config_changed.assert_called_once_with()
m_pandevice.xapi.set.assert_called_once_with(
PanDeviceXpath,
PanDeviceElementStr,
retry_on_peer=self.obj.HA_SYNC,
)
self.obj.xpath_short.assert_called_once_with()
self.obj.element_str.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with('create')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_delete_with_ha_sync_no_parent(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
m_uid.return_value = 'uid'
for x in range(3):
child = mock.Mock(**spec)
self.obj.children.append(child)
ret_val = self.obj.delete()
self.assertIsNone(ret_val)
m_pandevice.set_config_changed.assert_called_once_with()
m_pandevice.active().xapi.delete.assert_called_once_with(
PanDeviceXpath,
retry_on_peer=self.obj.HA_SYNC,
)
self.obj.xpath.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with('delete')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_delete_with_ha_sync_and_parent(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
Uid = 'uid'
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.parent = mock.Mock()
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
m_uid.return_value = Uid
for x in range(3):
child = mock.Mock(**spec)
self.obj.children.append(child)
ret_val = self.obj.delete()
self.assertIsNone(ret_val)
self.obj.parent.remove.assert_called_once_with(self.obj)
m_pandevice.set_config_changed.assert_called_once_with()
m_pandevice.active().xapi.delete.assert_called_once_with(
PanDeviceXpath,
retry_on_peer=self.obj.HA_SYNC,
)
self.obj.xpath.assert_called_once_with()
for c in self.obj.children:
c._check_child_methods.assert_called_once_with('delete')
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test_delete_without_ha_sync(self, m_uid):
PanDeviceId = '42'
PanDeviceXpath = 'path'
m_uid.return_value = 'uid'
self.obj.HA_SYNC = False
spec = {
'id': PanDeviceId,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=PanDeviceXpath)
for x in range(3):
            child = mock.Mock()
            self.obj.children.append(child)
        ret_val = self.obj.delete()
        self.assertIsNone(ret_val)
        m_pandevice.set_config_changed.assert_called_once_with()
        # Without HA sync the call goes straight to xapi, not active().xapi.
        m_pandevice.xapi.delete.assert_called_once_with(
            PanDeviceXpath,
            retry_on_peer=self.obj.HA_SYNC,
        )
        self.obj.xpath.assert_called_once_with()
        for c in self.obj.children:
            c._check_child_methods.assert_called_once_with('delete')
# Skip update
# Skip refresh
# Skip refresh_variable
# Skip _refresh_children
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_default_args_none_suffix(self, m_uid):
Xpath = '/x/path'
lasttag = ''
expected = 'foo'
spec = {
'find.return_value': expected,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.get.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
ret_val = self.obj._refresh_xml(False, True)
self.assertEqual(expected, ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_default_args_with_member_suffix(self, m_uid):
Xpath = '/x/path'
lasttag = 'member'
expected = 'foo'
spec = {
'find.return_value': expected,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.get.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
self.obj.SUFFIX = Base.MEMBER
ret_val = self.obj._refresh_xml(False, True)
self.assertEqual(expected, ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_default_args_with_entry_suffix(self, m_uid):
Xpath = '/x/path'
lasttag = 'entry'
expected = 'foo'
spec = {
'find.return_value': expected,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.get.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
self.obj.SUFFIX = Base.ENTRY
ret_val = self.obj._refresh_xml(False, True)
self.assertEqual(expected, ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_with_running_config(self, m_uid):
Xpath = '/x/path'
lasttag = ''
expected = 'foo'
spec = {
'find.return_value': expected,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.show.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
self.obj.refresh = mock.Mock()
ret_val = self.obj._refresh_xml(True, True)
self.assertEqual(expected, ret_val)
m_pandevice.xapi.show.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_no_refresh_children(self, m_uid):
Xpath = '/x/path'
lasttag = ''
expected = 'foo'
spec = {
'find.return_value': expected,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.get.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
self.obj.refresh = mock.Mock()
ret_val = self.obj._refresh_xml(False, False)
self.assertEqual(expected, ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_pannosuchnode_with_exceptions_on_raises_error(self, m_uid):
Xpath = '/x/path'
spec = {
'id': 'myid',
'xapi.get.side_effect': Err.PanNoSuchNode,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
m_uid.return_value = 'uid'
self.assertRaises(
Err.PanObjectMissing,
self.obj._refresh_xml, False, True)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_pannosuchnode_with_exceptions_off_returns_none(self, m_uid):
Xpath = '/x/path'
spec = {
'id': 'myid',
'xapi.get.side_effect': Err.PanNoSuchNode,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
m_uid.return_value = 'uid'
ret_val = self.obj._refresh_xml(False, False)
self.assertIsNone(ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_panxapierror_with_exceptions_on_raises_error(self, m_uid):
Xpath = '/x/path'
spec = {
'id': 'myid',
'xapi.get.side_effect': pan.xapi.PanXapiError,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
m_uid.return_value = 'uid'
self.assertRaises(
Err.PanObjectMissing,
self.obj._refresh_xml, False, True)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_api_action_raises_panxapierror_with_exceptions_off_returns_none(self, m_uid):
Xpath = '/x/path'
spec = {
'id': 'myid',
'xapi.get.side_effect': pan.xapi.PanXapiError,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
m_uid.return_value = 'uid'
ret_val = self.obj._refresh_xml(False, False)
self.assertIsNone(ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_find_fails_with_exceptions_on_raises_error(self, m_uid):
Xpath = '/x/path'
lasttag = ''
expected = 'foo'
spec = {
'find.return_value': None,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.get.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
self.assertRaises(
Err.PanObjectMissing,
self.obj._refresh_xml, False, True)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
@mock.patch('pandevice.base.PanObject.uid', new_callable=mock.PropertyMock)
def test__refresh_xml_find_fails_with_exceptions_off_returns_none(self, m_uid):
'''Requires exceptions=False.'''
Xpath = '/x/path'
lasttag = ''
expected = 'foo'
spec = {
'find.return_value': None,
}
m_root = mock.Mock(**spec)
m_uid.return_value = 'uid'
spec = {
'id': 'myid',
'xapi.get.return_value': m_root,
}
m_pandevice = mock.Mock(**spec)
self.obj.nearest_pandevice = mock.Mock(return_value=m_pandevice)
self.obj.xpath = mock.Mock(return_value=Xpath)
ret_val = self.obj._refresh_xml(False, False)
self.assertIsNone(ret_val)
m_pandevice.xapi.get.assert_called_once_with(
Xpath,
retry_on_peer=self.obj.HA_SYNC)
self.obj.xpath.assert_called_once_with()
m_root.find.assert_called_once_with(
'result/{0}'.format(lasttag))
def test_nearest_pandevice(self):
expected = 'return value'
self.obj._nearest_pandevice = mock.Mock(return_value=expected)
ret_val = self.obj.nearest_pandevice()
self.assertEqual(expected, ret_val)
self.obj._nearest_pandevice.assert_called_once_with()
def test__nearest_pandevice_with_parent(self):
expected = 'ParentObject'
spec = {
'_nearest_pandevice.return_value': expected,
}
self.obj.parent = mock.Mock(**spec)
ret_val = self.obj._nearest_pandevice()
self.assertEqual(expected, ret_val)
self.obj.parent._nearest_pandevice.assert_called_once_with()
def test__nearest_pandevice_without_parent_raises_error(self):
self.assertRaises(
Err.PanDeviceNotSet,
self.obj._nearest_pandevice)
def test_panorama_with_parent(self):
expected = 'PanoramaObject'
spec = {
'panorama.return_value': expected,
}
self.obj.parent = mock.Mock(**spec)
ret_val = self.obj.panorama()
self.assertEqual(expected, ret_val)
self.obj.parent.panorama.assert_called_once_with()
def test_panorama_without_parent_raises_error(self):
self.assertRaises(
Err.PanDeviceNotSet,
self.obj.panorama)
def test_devicegroup_with_parent(self):
expected = 'DeviceGroup'
spec = {
'devicegroup.return_value': expected,
}
self.obj.parent = mock.Mock(**spec)
ret_val = self.obj.devicegroup()
self.assertEqual(expected, ret_val)
self.obj.parent.devicegroup.assert_called_once_with()
def test_devicegroup_without_parent(self):
ret_val = self.obj.devicegroup()
self.assertIsNone(ret_val)
# Skip find
# Skip findall
# Skip find_or_create
# Skip findall_or_create
# Skip find_index
# Skip applyall
# Skip refreshall
# Skip refreshall_from_xml
# Skip _parse_xml
class TestParamPath(unittest.TestCase):
def setUp(self):
self.elm = ET.Element('myroot')
def test_element_for_exclude_returns_none(self):
settings = {'baz': 'jack'}
p = Base.ParamPath('baz', path='foo/bar', vartype=None,
condition=None, values=None, exclude=True)
result = p.element(self.elm, settings, False)
self.assertIsNone(result)
def test_element_path_has_variable(self):
p = Base.ParamPath('baz', path='{mode}/bar/baz', vartype=None,
condition=None, values=None)
settings = {'baz': 'jack', 'mode': 'layer3'}
result = p.element(self.elm, settings, False)
self.assertIsNotNone(result)
elm = result.find('./layer3/bar/baz')
self.assertIsNotNone(elm, msg='Failed: elm = {0}'.format(ET.tostring(result)))
self.assertEqual(settings['baz'], elm.text)
def test_element_for_vartype_member_for_string(self):
p = Base.ParamPath('baz', path='foo/bar/baz', vartype='member',
condition=None, values=None)
settings = {'baz': 'jack'}
result = p.element(self.elm, settings, False)
self.assertIsNotNone(result)
elm = result.findall('./foo/bar/baz/member')
self.assertTrue(elm)
self.assertEqual(1, len(elm))
self.assertEqual(settings['baz'], elm[0].text)
def test_element_for_vartype_member_for_list(self):
p = Base.ParamPath('baz', path='foo/bar/baz', vartype='member',
condition=None, values=None)
settings = {'baz': ['jack', 'john', 'jane', 'margret']}
result = p.element(self.elm, settings, False)
self.assertIsNotNone(result)
elms = result.findall('./foo/bar/baz/member')
self.assertEqual(len(settings['baz']), len(elms))
for elm in elms:
self.assertTrue(elm.text in settings['baz'])
class Abouter(object):
def __init__(self, mode='layer3'):
self.mode = mode
def _about_object(self):
return {'mode': self.mode}
class ParentClass1(Abouter):
pass
class ParentClass2(Abouter):
pass
class UnassociatedParent(Abouter):
pass
class TestParentAwareXpathBasics(unittest.TestCase):
DEFAULT_PATH_1 = '/default/path/1'
DEFAULT_PATH_2 = '/default/path/2'
SPECIFIED_PATH_1 = '/some/specific/path/1'
SPECIFIED_PATH_2 = '/some/specific/path/2'
def setUp(self):
self.obj = Base.ParentAwareXpath()
self.obj.add_profile(value=self.DEFAULT_PATH_1)
self.obj.add_profile('1.0.0', self.DEFAULT_PATH_2)
self.obj.add_profile(value=self.SPECIFIED_PATH_1,
parents=('ParentClass1', 'ParentClass2'))
self.obj.add_profile('2.0.0', self.SPECIFIED_PATH_2,
('ParentClass1', 'ParentClass2'))
def test_old_default_xpath(self):
parent = UnassociatedParent()
self.assertEqual(
self.DEFAULT_PATH_1,
self.obj._get_versioned_value(
(0, 5, 0), parent))
def test_new_default_xpath(self):
parent = UnassociatedParent()
self.assertEqual(
self.DEFAULT_PATH_2,
self.obj._get_versioned_value(
(1, 0, 0), parent))
    def test_old_specified_xpath_for_class1(self):
parent = ParentClass1()
self.assertEqual(
self.SPECIFIED_PATH_1,
self.obj._get_versioned_value(
(0, 5, 0), parent))
    def test_new_specified_xpath_for_class1(self):
parent = ParentClass1()
self.assertEqual(
self.SPECIFIED_PATH_2,
self.obj._get_versioned_value(
(2, 0, 0), parent))
    def test_old_specified_xpath_for_class2(self):
parent = ParentClass2()
self.assertEqual(
self.SPECIFIED_PATH_1,
self.obj._get_versioned_value(
(0, 0, 0), parent))
    def test_new_specified_xpath_for_class2(self):
parent = ParentClass2()
self.assertEqual(
self.SPECIFIED_PATH_2,
self.obj._get_versioned_value(
(5, 0, 0), parent))
def test_no_parent_gets_newest_version(self):
parent = None
self.assertEqual(
self.DEFAULT_PATH_2,
self.obj._get_versioned_value(
Base.VersionedPanObject._UNKNOWN_PANOS_VERSION, parent))
def test_no_fallback_raises_value_error(self):
parent = None
obj = Base.ParentAwareXpath()
obj.add_profile(
parents=('ParentClass1', ),
value='/some/path',
)
self.assertRaises(
ValueError,
obj._get_versioned_value,
(1, 0, 0), parent)
class TestParentAwareXpathWithParams(unittest.TestCase):
OLD_LAYER3_PATH = '/units/layer3/old'
NEW_LAYER3_PATH = '/units/layer3/new'
OLD_LAYER2_PATH = '/units/layer2/old'
NEW_LAYER2_PATH = '/units/layer2/new'
def setUp(self):
self.obj = Base.ParentAwareXpath()
self.obj.add_profile(
parents=('ParentClass1', None),
value=self.OLD_LAYER3_PATH)
self.obj.add_profile(
version='1.0.0',
parents=('ParentClass1', None),
value=self.NEW_LAYER3_PATH)
self.obj.add_profile(
parents=('ParentClass1', ),
parent_param='mode',
parent_param_values=['junk', 'layer2'],
value=self.OLD_LAYER2_PATH)
self.obj.add_profile(
version='2.0.0',
parents=('ParentClass1', ),
parent_param='mode',
parent_param_values=['junk', 'layer2'],
value=self.NEW_LAYER2_PATH)
def test_old_default_path(self):
parent = UnassociatedParent('foo')
self.assertEqual(
self.OLD_LAYER3_PATH,
self.obj._get_versioned_value(
(0, 5, 0), parent))
def test_known_parent_and_param_for_old_l3_path(self):
parent = ParentClass1()
self.assertEqual(
self.OLD_LAYER3_PATH,
self.obj._get_versioned_value(
(0, 5, 0), parent))
def test_known_parent_and_param_for_new_l3_path(self):
parent = ParentClass1()
self.assertEqual(
self.NEW_LAYER3_PATH,
self.obj._get_versioned_value(
(1, 5, 0), parent))
def test_known_parent_and_param_for_old_l2_path(self):
parent = ParentClass1('layer2')
self.assertEqual(
self.OLD_LAYER2_PATH,
self.obj._get_versioned_value(
(0, 1, 0), parent))
def test_known_parent_and_param_for_new_l2_path(self):
parent = ParentClass1('layer2')
self.assertEqual(
self.NEW_LAYER2_PATH,
self.obj._get_versioned_value(
(5, 1, 0), parent))
def test_no_parent_gets_newest_default(self):
parent = None
self.assertEqual(
self.NEW_LAYER3_PATH,
self.obj._get_versioned_value(
Base.VersionedPanObject._UNKNOWN_PANOS_VERSION, parent))
class MyVersionedObject(Base.VersionedPanObject):
SUFFIX = Base.ENTRY
def _setup(self):
params = []
params.append(Base.VersionedParamPath(
'entries', path='multiple/entries', vartype='entry'))
params.append(Base.VersionedParamPath(
'members', path='multiple/members', vartype='member'))
params.append(Base.VersionedParamPath(
'someint', path='someint', vartype='int'))
self._params = tuple(params)
class TestEqual(unittest.TestCase):
def test_ordered(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
self.assertTrue(o1.equal(o2))
def test_unordered_entries(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['b', 'a'], ['c', 'd'], 5)
self.assertTrue(o1.equal(o2))
def test_unordered_members(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['a', 'b'], ['d', 'c'], 5)
self.assertTrue(o1.equal(o2))
def test_values_are_unchanged_after_comparison(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['b', 'a'], ['d', 'c'], 5)
o1.equal(o2)
self.assertEqual(o1.entries, ['a', 'b'])
self.assertEqual(o1.members, ['c', 'd'])
self.assertEqual(o2.entries, ['b', 'a'])
self.assertEqual(o2.members, ['d', 'c'])
def test_str_list_field_is_equal(self):
o1 = MyVersionedObject('a', ['a', ], ['c', 'd'], 5)
o2 = MyVersionedObject('a', 'a', ['c', 'd'], 5)
self.assertTrue(o1.equal(o2))
def test_unequal_entries_returns_false(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['a', 'i'], ['c', 'd'], 5)
self.assertFalse(o1.equal(o2))
def test_unequal_members_returns_false(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['a', 'b'], ['c', 'i'], 5)
self.assertFalse(o1.equal(o2))
def test_unequal_ints_returns_false(self):
o1 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 5)
o2 = MyVersionedObject('a', ['a', 'b'], ['c', 'd'], 6)
self.assertFalse(o1.equal(o2))
class TestTree(unittest.TestCase):
def test_dot(self):
import pandevice.device as Device
expected = 'digraph configtree {graph [rankdir=LR, fontsize=10, margin=0.001];' \
'node [shape=box, fontsize=10, height=0.001, margin=0.1, ordering=out];' \
'"PanDevice : None" [style=filled fillcolor= ' \
'URL="http://pandevice.readthedocs.io/en/latest/module-base.html#pandevice.base.PanDevice" ' \
'target="_blank"];"SystemSettings : " [style=filled fillcolor=lightpink ' \
'URL="http://pandevice.readthedocs.io/en/latest/module-device.html' \
'#pandevice.device.SystemSettings" target="_blank"];' \
'"PanDevice : None" -> "SystemSettings : ";}'
fw = Base.PanDevice(hostname=None, serial='Serial')
sys = Device.SystemSettings()
fw.add(sys)
ret_val = fw.dot()
self.assertEqual(ret_val, expected)
if __name__ == '__main__':
unittest.main()
# --- end of file: SplunkforPaloAltoNetworks/bin/lib/pandevice/tests/test_base.py
#     (repo: PaloAltoNetworks-BD/SplunkforPaloAltoNetworks, license: ISC) ---
"""Language model baselines in TensorFlow
"""
from itertools import chain
from baseline.tf.tfy import *
from baseline.version import __version__
from baseline.model import LanguageModel, register_model
from baseline.tf.embeddings import *
from baseline.tf.tfy import new_placeholder_dict, TRAIN_FLAG, lstm_cell_w_dropout
from baseline.utils import read_json, write_json, MAGIC_VARS
class LanguageModelBase(tf.keras.Model, LanguageModel):
"""Base for all baseline implementations of LMs
This class provides a loose skeleton around which the baseline models
are built. This essentially consists of dividing up the network into a logical separation between "embedding",
or composition of lookup tables to build a vector representation of a temporal input, "decoding",
or the conversion of temporal data to a decoded representation, and "output" --
a projection to output space and a softmax
"""
def __init__(self):
"""Construct a base LM
"""
super().__init__()
self.saver = None
self.hsz = None
self.probs = None
self._unserializable = []
def save_values(self, basename):
"""Save tensor files out
:param basename: Base name of model
:return:
"""
if not tf.executing_eagerly():
self.saver.save(self.sess, basename, write_meta_graph=False)
else:
self.save_weights(f"{basename}.wgt")
def save_md(self, basename):
"""This method saves out a `.state` file containing meta-data from these classes and any info
registered by a user-defined derived class as a `property`. Also write the `graph` and `saver` and `labels`
:param basename:
:return:
"""
write_json(self._state, basename + '.state')
for key, embedding in self.embeddings.items():
embedding.save_md(basename + '-{}-md.json'.format(key))
def _record_state(self, embeddings, **kwargs):
"""
First, write out the embedding names, so we can recover those. Then do a deepcopy on the model init params
so that it can be recreated later. Anything that is a placeholder directly on this model needs to be removed
:param kwargs:
:return:
"""
embeddings_info = {}
for k, v in embeddings.items():
embeddings_info[k] = v.__class__.__name__
blacklist = set(chain(self._unserializable, MAGIC_VARS, embeddings.keys()))
self._state = {k: v for k, v in kwargs.items() if k not in blacklist}
self._state.update({
'version': __version__,
'module': self.__class__.__module__,
'class': self.__class__.__name__,
'embeddings': embeddings_info,
})
def set_saver(self, saver):
"""Connect a `tf.Saver` to the model
:param saver: A saver
:return: None
"""
self.saver = saver
def save(self, basename):
"""Save the model
:param basename: The model prefix
:return:
"""
self.save_md(basename)
self.save_values(basename)
def _create_loss(self, scope):
with tf.compat.v1.variable_scope(scope):
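            # Flatten batch and time into a single axis, take the log-softmax
            # over the vocabulary, and average the per-token negative
            # log-likelihood into a scalar loss.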
targets = tf.reshape(self.y, [-1])
bt_x_v = tf.nn.log_softmax(tf.reshape(self.logits, [-1, self.vsz]), axis=-1)
one_hots = tf.one_hot(targets, self.vsz)
example_loss = -tf.reduce_sum(one_hots * bt_x_v, axis=-1)
loss = tf.reduce_mean(example_loss)
return loss
def create_loss(self):
"""Create training loss operator
:return: loss
"""
return self._create_loss(scope='loss')
def create_test_loss(self):
"""Create test loss operator
:return: loss
"""
return self._create_loss(scope='test_loss')
def make_input(self, batch_dict, train=False):
"""When we are running with `DataFeed`s, need to transform to `feed_dict`s
:param batch_dict: The batch for a step
:param train: (`bool`) Are we training (or evaluating)?
:return: A `feed_dict`
"""
if not tf.executing_eagerly():
batch_dict_for_model = new_placeholder_dict(train)
for key in self.src_keys:
batch_dict_for_model["{}:0".format(key)] = batch_dict[key]
y = batch_dict.get('y')
if y is not None:
batch_dict_for_model[self.y] = batch_dict['y']
else:
SET_TRAIN_FLAG(train)
batch_dict_for_model = {}
for key in self.src_keys:
batch_dict_for_model[key] = batch_dict[key]
return batch_dict_for_model
def predict(self, batch_dict):
"""Do prediction from a `batch_dict`
:param batch_dict: A step of data
:return: The softmax output for this step
"""
batch_dict = self.make_input(batch_dict)
if not tf.executing_eagerly():
step_softmax = self.sess.run(self.probs, batch_dict)
else:
# FIXME: This is not really the proper handling for eager mode
# We want to be able to pass in the last hidden state and emit the current one right?
step_softmax = tf.nn.softmax(self(batch_dict, None)[0])
return step_softmax
@classmethod
def create(cls, embeddings, **kwargs):
"""Create the language model
:param embeddings: A set of embeddings used
:param kwargs: see below
:Keyword Arguments:
* *tgt_key* (`str`) -- Which vocabulary is the destination vocabulary
(for example, you might have character inputs, or character + word inputs. The outputs need to be specified)
* *sess* (`tf.compat.v1.Session`) -- Optionally, pass in a session (or one will be created)
* *pdrop* (`float`) -- The dropout probability
* *y* -- Optional target. If this is not passed in, a placeholder gets created
* *hsz* (`int`) -- Number of hidden units per layers
* *unif* (`float`) -- set the weights initializer to small random uniform values
:return: The created model
"""
lm = cls()
lm.src_keys = kwargs.get('src_keys', embeddings.keys())
lm.tgt_key = kwargs.get('tgt_key')
if lm.tgt_key is None:
raise Exception('Need a `tgt_key` to know which source vocabulary should be used for destination')
lm._unserializable.append(lm.tgt_key)
lm._record_state(embeddings, **kwargs)
inputs = {}
if not tf.executing_eagerly():
for k, embedding in embeddings.items():
x = kwargs.get(k, embedding.create_placeholder(name=k))
inputs[k] = x
lm.y = kwargs.get('y', tf.compat.v1.placeholder(tf.int32, [None, None], name="y"))
lm.sess = kwargs.get('sess', tf.compat.v1.Session())
lm.create_layers(embeddings, **kwargs)
if not tf.executing_eagerly():
if lm.requires_state:
lm.zero_state(inputs)
lm.logits, lm.final_state = lm(inputs, lm.initial_state)
else:
lm.logits, _ = lm(inputs, None)
lm.probs = tf.nn.softmax(lm.logits, name="softmax")
return lm
def call(self, inputs: Dict[str, TensorDef], hidden: TensorDef) -> Tuple[TensorDef, TensorDef]:
"""Take the input and produce the best path of labels out
:param inputs: The feature indices for the input
:return: The output and hidden units
"""
def create_layers(self, embeddings, **kwargs):
"""This method defines the model itself, and must be overloaded by derived classes
This function will update `self` with the layers required to execute the `call()` method
:param embeddings: The input feature indices
:param kwargs:
:return:
"""
@classmethod
def load(cls, basename, **kwargs):
"""Reload the model from a graph file and a checkpoint
The model that is loaded is independent of the pooling and stacking layers, making this class reusable
by sub-classes.
:param basename: The base directory to load from
:param kwargs: See below
:Keyword Arguments:
* *sess* -- An optional tensorflow session. If not passed, a new session is
created
:return: A restored model
"""
_state = read_json(basename + '.state')
if not tf.executing_eagerly():
_state['sess'] = kwargs.pop('sess', create_session())
embeddings_info = _state.pop("embeddings")
with _state['sess'].graph.as_default():
embeddings = reload_embeddings(embeddings_info, basename)
for k in embeddings_info:
if k in kwargs:
_state[k] = kwargs[k]
_state['model_type'] = kwargs.get('model_type', 'default')
model = cls.create(embeddings, **_state)
model._state = _state
do_init = kwargs.get('init', True)
if do_init:
init = tf.compat.v1.global_variables_initializer()
model.sess.run(init)
model.saver = tf.compat.v1.train.Saver()
model.saver.restore(model.sess, basename)
else:
_state = read_json(basename + '.state')
_state['model_type'] = kwargs.get('model_type', 'default')
embeddings = {}
embeddings_dict = _state.pop("embeddings")
for key, class_name in embeddings_dict.items():
md = read_json('{}-{}-md.json'.format(basename, key))
embed_args = dict({'vsz': md['vsz'], 'dsz': md['dsz']})
Constructor = eval(class_name)
embeddings[key] = Constructor(key, **embed_args)
model = cls.create(embeddings, **_state)
model._state = _state
model.load_weights(f"{basename}.wgt")
return model
@property
def requires_state(self):
pass
class AbstractGeneratorModel(LanguageModelBase):
def create_layers(self, embeddings, **kwargs):
self.embeddings = self.init_embed(embeddings, **kwargs)
self.embeddings_proj = self.init_embeddings_proj(**kwargs)
self.generator = self.init_generate(**kwargs)
self.output_layer = self.init_output(embeddings, **kwargs)
def call(self, inputs: Dict[str, TensorDef], hidden: TensorDef) -> Tuple[TensorDef, TensorDef]:
emb = self.embed(inputs)
output, hidden = self.generate(emb, hidden)
return self.output_layer(output), hidden
def embed(self, input):
embedded_dropout = self.embeddings(input)
return self.embeddings_proj(embedded_dropout)
def init_embed(self, embeddings: Dict[str, TensorDef], **kwargs) -> BaseLayer:
"""This method creates the "embedding" layer of the inputs, with an optional reduction
:param embeddings: A dictionary of embeddings
:Keyword Arguments: See below
* *embeddings_reduction* (defaults to `concat`) An operator to perform on a stack of embeddings
        * *embeddings_dropout* (`float`) -- dropout rate applied to the embeddings (defaults to 0.0)
:return: The output of the embedding stack followed by its reduction. This will typically be an output
with an additional dimension which is the hidden representation of the input
"""
reduction = kwargs.get('embeddings_reduction', 'concat')
embeddings_dropout = float(kwargs.get('embeddings_dropout', 0.0))
return EmbeddingsStack({k: embeddings[k] for k in self.src_keys}, embeddings_dropout, reduction=reduction)
def init_embeddings_proj(self, **kwargs):
input_sz = self.embeddings.output_dim
hsz = kwargs.get('hsz', kwargs.get('d_model'))
if hsz != input_sz:
proj = tf.keras.layers.Dense(hsz)
print('Applying a transform from {} to {}'.format(input_sz, hsz))
else:
proj = PassThru(hsz)
return proj
def init_generate(self, **kwargs):
pass
def generate(self, emb, hidden):
return self.generator((emb, hidden))
def init_output(self, embeddings, **kwargs):
self.vsz = embeddings[self.tgt_key].get_vsz()
do_weight_tying = bool(kwargs.get('tie_weights', False))
output_bias = kwargs.get('output_bias', False)
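        # Weight tying reuses the target embedding matrix as the output
        # projection, which saves parameters and couples the input and output
        # representations (it requires the hidden size to match the embedding
        # size).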
if do_weight_tying:
output = WeightTieDense(embeddings[self.tgt_key], use_bias=output_bias)
else:
output = tf.keras.layers.Dense(self.vsz)
return output
@register_model(task='lm', name='default')
class RNNLanguageModel(AbstractGeneratorModel):
"""RNN-based Language Model built on base class
"""
def __init__(self):
"""Construct an RNNLM
"""
super().__init__()
self.rnntype = 'lstm'
self.initial_state = None
@property
def requires_state(self):
return True
def zero_state(self, inputs):
batchsz = get_shape_as_list(inputs[self.src_keys[0]])[0]
self.initial_state = self.generator.layer.zero_state(batchsz)
def init_generate(self, **kwargs):
"""LSTM-based method for decoding
:param inputs: The outputs of the embeddings
:param kwargs: See above
:return: The layer
"""
pdrop = float(kwargs.get('dropout', 0.5))
layers = kwargs.get('layers', kwargs.get('num_layers', 1))
self.hsz = kwargs.get('hsz', kwargs.get('d_model'))
return WithDropoutOnFirst(LSTMEncoderWithState(self.hsz, self.hsz, layers, pdrop, batch_first=True),
pdrop,
kwargs.get('variational', False))
@register_model(task='lm', name='transformer')
class TransformerLanguageModel(AbstractGeneratorModel):
"""Transformer-based Language Model built on base class
"""
def __init__(self):
"""Construct an TLM
"""
super().__init__()
@property
def requires_state(self):
return False
def init_generate(self, **kwargs):
pdrop = float(kwargs.get('dropout', 0.1))
layers = kwargs.get('layers', kwargs.get('num_layers', 1))
d_model = int(kwargs.get('d_model', kwargs.get('hsz')))
num_heads = kwargs.get('num_heads', 4)
d_ff = int(kwargs.get('d_ff', 4 * d_model))
rpr_k = kwargs.get('rpr_k')
d_k = kwargs.get('d_k')
scale = bool(kwargs.get('scale', True))
activation = kwargs.get('activation', 'gelu')
ffn_pdrop = kwargs.get('ffn_pdrop', 0.0)
layer_norm_eps = kwargs.get('layer_norm_eps', 1e-12)
layer_norms_after = kwargs.get('layer_norms_after', False)
layer_drop = kwargs.get('layer_drop', 0.0)
windowed_ra = kwargs.get('windowed_ra', False)
rpr_value_on = kwargs.get('rpr_value_on', True)
return TransformerEncoderStack(num_heads, d_model=d_model, pdrop=pdrop, scale=scale,
layers=layers, d_ff=d_ff, rpr_k=rpr_k, d_k=d_k,
activation=activation,
ffn_pdrop=ffn_pdrop,
layer_norm_eps=layer_norm_eps,
layer_norms_after=layer_norms_after, windowed_ra=windowed_ra,
rpr_value_on=rpr_value_on,
layer_drop=layer_drop)
def create_mask(self, bth):
max_seqlen = get_shape_as_list(bth)[1]
mask = subsequent_mask(max_seqlen)
return mask
def generate(self, bth, _):
mask = self.create_mask(bth)
return self.generator((bth, mask)), None
@register_model(task='lm', name='transformer-mlm')
class TransformerMaskedLanguageModel(TransformerLanguageModel):
def create_mask(self, bth):
return None
@register_model(task='lm', name='gmlp-mlm')
class GatedMLPLanguageModel(AbstractGeneratorModel):
def __init__(self):
super().__init__()
@property
def requires_state(self):
return False
def init_generate(self, **kwargs):
pdrop = float(kwargs.get('dropout', 0.1))
layers = kwargs.get('layers', kwargs.get('num_layers', 1))
d_model = int(kwargs.get('d_model', kwargs.get('hsz')))
d_ff = int(kwargs.get('d_ff', 4 * d_model))
activation = kwargs.get('activation', 'gelu')
ffn_pdrop = kwargs.get('ffn_pdrop', 0.0)
layer_norm_eps = kwargs.get('layer_norm_eps', 1e-12)
layer_drop = kwargs.get('layer_drop', 0.0)
nctx = int(kwargs.get('nctx', 256))
return GatedMLPEncoderStack(d_model=d_model, pdrop=pdrop,
layers=layers, nctx=nctx,
d_ff=d_ff,
activation=activation,
ffn_pdrop=ffn_pdrop,
layer_norm_eps=layer_norm_eps,
layer_drop=layer_drop)
def create_mask(self, bth):
return None
def generate(self, bth, _):
mask = self.create_mask(bth)
return self.generator((bth, mask)), None
# --- end of file: baseline/tf/lm/model.py
#     (repo: dpressel/baseline, license: Apache-2.0) ---
'''
urllib2 - Library for opening URLs
A library for opening URLs that can be extended by defining custom protocol
handlers.
The urllib2 module defines functions and classes which help in opening URLs
(mostly HTTP) in a complex world - basic and digest authentication,
redirections, cookies and more.
The urllib2 module provides an updated API for using internet resources
identified by URLs. It is designed to be extended by individual applications to
support new protocols or add variations to existing protocols (such as handling
HTTP basic authentication).
https://pymotw.com/2/urllib2/
'''
import urllib
import urllib2
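# EXAMPLE 0: Simple HTTP GET:
# ==============================================================================
# A minimal sketch, assuming the same local test server
# (http://localhost:8080/) that the examples below run against.
response = urllib2.urlopen('http://localhost:8080/')
print 'RESPONSE:', response
print 'URL     :', response.geturl()
print 'HEADERS :'
print response.info()
print 'DATA    :'
print response.read()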
# EXAMPLE 1: HTTP POST:
# ==============================================================================
# To POST form-encoded data to the remote server, instead of using GET, pass the
# encoded query arguments as data to urlopen().
query_args = { 'q':'query string', 'foo':'bar' }
encoded_args = urllib.urlencode(query_args)
url = 'http://localhost:8080/'
print urllib2.urlopen(url, encoded_args).read()
# EXAMPLE 2: Working with Requests Directly:
# ==============================================================================
# urlopen() is a convenience function that hides some of the details of how the
# request is made and handled for you. For more precise control, you may want to
# instantiate and use a Request object directly.
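# A minimal sketch of the direct-Request pattern, assuming the same local test
# server as the other examples:
request = urllib2.Request('http://localhost:8080/')
print 'Request method:', request.get_method()
response = urllib2.urlopen(request)
print response.read()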
# EXAMPLE 3: Adding Outgoing Headers:
# ==============================================================================
# As the examples above illustrate, the default User-agent header value is made
# up of the constant Python-urllib, followed by the Python interpreter version.
# If you are creating an application that will access other people’s web
# resources, it is courteous to include real user agent information in your
# requests, so they can identify the source of the hits more easily. Using a
# custom agent also allows them to control crawlers using a robots.txt file
# (see robotparser).
import urllib2
request = urllib2.Request('http://localhost:8080/')
request.add_header('User-agent', 'PyMOTW (http://www.doughellmann.com/PyMOTW/)')
response = urllib2.urlopen(request)
data = response.read()
print data
# EXAMPLE 4: Posting Form Data:
# ==============================================================================
# You can set the outgoing data on the Request to post it to the server.
import urllib
import urllib2
query_args = { 'q':'query string', 'foo':'bar' }
request = urllib2.Request('http://localhost:8080/')
print 'Request method before data:', request.get_method()
request.add_data(urllib.urlencode(query_args))
print 'Request method after data :', request.get_method()
request.add_header('User-agent', 'PyMOTW (http://www.doughellmann.com/PyMOTW/)')
print
print 'OUTGOING DATA:'
print request.get_data()
print
print 'SERVER RESPONSE:'
print urllib2.urlopen(request).read()
# EXAMPLE 5: Uploading Files:
# ==============================================================================
# Encoding files for upload requires a little more work than simple forms.
# A complete MIME message needs to be constructed in the body of the request,
# so that the server can distinguish incoming form fields from uploaded files.
import itertools
import mimetools
import mimetypes
from cStringIO import StringIO
import urllib
import urllib2
class MultiPartForm(object):
'''Accumulate the data to be used when posting a form.'''
def __init__(self):
self.form_fields = []
self.files = []
self.boundary = mimetools.choose_boundary()
return
def get_content_type(self):
return 'multipart/form-data; boundary=%s' % self.boundary
def add_field(self, name, value):
"""Add a simple field to the form data."""
self.form_fields.append((name, value))
return
def add_file(self, fieldname, filename, fileHandle, mimetype=None):
"""Add a file to be uploaded."""
body = fileHandle.read()
if mimetype is None:
mimetype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
self.files.append((fieldname, filename, mimetype, body))
return
def __str__(self):
'''Return a string representing the form data, including attached files.'''
# Build a list of lists, each containing "lines" of the
# request. Each part is separated by a boundary string.
# Once the list is built, return a string where each
# line is separated by '\r\n'.
parts = []
part_boundary = '--' + self.boundary
# Add the form fields
parts.extend(
[ part_boundary,
'Content-Disposition: form-data; name="%s"' % name,
'',
value,
]
for name, value in self.form_fields
)
# Add the files to upload
parts.extend(
[ part_boundary,
'Content-Disposition: file; name="%s"; filename="%s"' % \
(field_name, filename),
'Content-Type: %s' % content_type,
'',
body,
]
for field_name, filename, content_type, body in self.files
)
# Flatten the list and add closing boundary marker,
# then return CR+LF separated data
flattened = list(itertools.chain(*parts))
flattened.append('--' + self.boundary + '--')
flattened.append('')
return '\r\n'.join(flattened)
if __name__ == '__main__':
# Create the form with simple fields
form = MultiPartForm()
form.add_field('firstname', 'Doug')
form.add_field('lastname', 'Hellmann')
# Add a fake file
form.add_file('biography', 'bio.txt',
fileHandle=StringIO('Python developer and blogger.'))
# Build the request
request = urllib2.Request('http://localhost:8080/')
request.add_header('User-agent', 'PyMOTW (http://www.doughellmann.com/PyMOTW/)')
body = str(form)
request.add_header('Content-type', form.get_content_type())
request.add_header('Content-length', len(body))
request.add_data(body)
print
print 'OUTGOING DATA:'
print request.get_data()
print
print 'SERVER RESPONSE:'
print urllib2.urlopen(request).read()
# EXAMPLE 6: Custom Protocol Handlers:
# ==============================================================================
# urllib2 has built-in support for HTTP(S), FTP, and local file access.
# If you need to add support for other URL types, you can register your own
# protocol handler to be invoked as needed. For example, if you want to support
# URLs pointing to arbitrary files on remote NFS servers, without requiring your
# users to mount the path manually, would create a class derived from
# BaseHandler and with a method nfs_open().
# The protocol open() method takes a single argument, the Request instance, and
# it should return an object with a read() method that can be used to read the
# data, an info() method to return the response headers, and geturl() to return
# the actual URL of the file being read. A simple way to achieve that is to
# create an instance of urllib.addurlinfo, passing the headers, URL, and open
# file handle in to the constructor.
import mimetypes
import os
import tempfile
import urllib
import urllib2
class NFSFile(file):
def __init__(self, tempdir, filename):
self.tempdir = tempdir
file.__init__(self, filename, 'rb')
def close(self):
print
print 'NFSFile:'
print ' unmounting %s' % self.tempdir
print ' when %s is closed' % os.path.basename(self.name)
return file.close(self)
class FauxNFSHandler(urllib2.BaseHandler):
def __init__(self, tempdir):
self.tempdir = tempdir
def nfs_open(self, req):
url = req.get_selector()
directory_name, file_name = os.path.split(url)
server_name = req.get_host()
print
print 'FauxNFSHandler simulating mount:'
print ' Remote path: %s' % directory_name
print ' Server : %s' % server_name
        print '  Local path : %s' % self.tempdir
        print '  File name  : %s' % file_name
        local_file = os.path.join(self.tempdir, file_name)
        fp = NFSFile(self.tempdir, local_file)
content_type = mimetypes.guess_type(file_name)[0] or 'application/octet-stream'
stats = os.stat(local_file)
size = stats.st_size
headers = { 'Content-type': content_type,
'Content-length': size,
}
return urllib.addinfourl(fp, headers, req.get_full_url())
if __name__ == '__main__':
tempdir = tempfile.mkdtemp()
try:
# Populate the temporary file for the simulation
with open(os.path.join(tempdir, 'file.txt'), 'wt') as f:
f.write('Contents of file.txt')
# Construct an opener with our NFS handler
# and register it as the default opener.
opener = urllib2.build_opener(FauxNFSHandler(tempdir))
urllib2.install_opener(opener)
# Open the file through a URL.
response = urllib2.urlopen('nfs://remote_server/path/to/the/file.txt')
print
print 'READ CONTENTS:', response.read()
print 'URL :', response.geturl()
print 'HEADERS:'
for name, value in sorted(response.info().items()):
print ' %-15s = %s' % (name, value)
response.close()
finally:
os.remove(os.path.join(tempdir, 'file.txt'))
os.removedirs(tempdir) | rolandovillca/python_basis | web/client_post_with_urllib2.py | Python | mit | 9,620 |
from errbot import BotPlugin
class Circular4(BotPlugin):
pass
| mrshu/err | tests/dependent_plugins/circular4.py | Python | gpl-3.0 | 68 |
class Solution(object):
def countAndSay(self, n):
"""
:type n: int
:rtype: str
"""
if 1 == n:
return "1"
origin = self.countAndSay(n - 1)
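        # run-length encode the previous term: walk it once counting runs of
        # equal digits and emit "<count><digit>" for each run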
current = origin[0]
count = 1
target = ""
for val in origin[1:]:
if current == val:
count += 1
else:
target += str(count) + current
current = val
count = 1
target += str(count) + current
print target
return target
if "__main__" == __name__:
s = Solution()
print s.countAndSay(30)
print "1, 11, 21, 1211, 111221, 31221, 13112211, 1113212221, 311312113211 is expected"
| pandaoknight/leetcode | easy/count-and-say/main.py | Python | gpl-2.0 | 727 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2020 Stephane Caron <[email protected]>
#
# This file is part of pymanoid <https://github.com/stephane-caron/pymanoid>.
#
# pymanoid is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# pymanoid is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# pymanoid. If not, see <http://www.gnu.org/licenses/>.
"""
This example shows how to compute the contact wrench cone (CWC), a generalized
multi-contact friction cone. See [Caron15]_ for details.
"""
import IPython
from numpy import array
import pymanoid
from pymanoid import Stance
def print_contact(name, contact):
print("%s:" % name)
print("- pos = %s" % repr(contact.p))
print("- rpy = %s" % repr(contact.rpy))
print("- half-length = %s" % repr(contact.shape[0]))
print("- half-width = %s" % repr(contact.shape[1]))
print("- friction = %f" % contact.friction)
print("")
if __name__ == "__main__":
sim = pymanoid.Simulation(dt=0.03)
robot = pymanoid.robots.JVRC1('JVRC-1.dae', download_if_needed=True)
sim.set_viewer()
sim.viewer.SetCamera([
[0.60587192, -0.36596244, 0.70639274, -2.4904027],
[-0.79126787, -0.36933163, 0.48732874, -1.6965636],
[0.08254916, -0.85420468, -0.51334199, 2.79584694],
[0., 0., 0., 1.]])
robot.set_transparency(0.25)
stance = Stance.from_json('stances/triple.json')
stance.bind(robot)
robot.ik.solve()
sim.schedule(robot.ik)
sim.start()
p = array([0., 0., 0.])
CWC_O = stance.compute_wrench_inequalities(p)
print_contact("Left foot", stance.left_foot)
print_contact("Right foot", stance.right_foot)
print("Contact Wrench Cone at %s:" % str(p))
print("- has %d lines" % CWC_O.shape[0])
if IPython.get_ipython() is None:
IPython.embed()
| stephane-caron/pymanoid | examples/contact_stability/wrench_friction_cone.py | Python | gpl-3.0 | 2,284 |
"""
[2015-06-01] Challenge #217 [Easy] Lumberjack Pile Problem
https://www.reddit.com/r/dailyprogrammer/comments/3840rp/20150601_challenge_217_easy_lumberjack_pile/
#Description:
The famous lumberjacks of /r/dailyprogrammer are well known to be weird and interesting. But we always enjoy solving
their problems with some code.
For today's challenge the lumberjacks pile their logs from the forest in a grid n x n. Before using us to solve their
inventory woes they randomly just put logs in random piles. Currently the pile sizes vary and they want to even them
out. So let us help them out.
#Input:
You will be given the size of the storage area. The number of logs we have to put into storage and the log count in
each pile currently in storage. You can either read it in from the user or hardcode this data.
##Input Example:
3
7
1 1 1
2 1 3
1 4 1
So the size is 3 x 3. We have 7 logs to place and we see the 3 x 3 grid of current size of the log piles.
#Log Placement:
We want to fill the smallest piles first and we want to evenly spread out the logs. So in the above example we have 7
logs. The lowest log count is 1. So starting with the first pile in the upper left
and going left-right on each row we place 1 log in each 1 pile until all the current 1 piles get a log. (or until we
run out). After that if we have more logs we then have to add logs to piles with 2 (again moving left-right on each
row.)
Keep in mind lumberjacks do not want to move logs already in a pile. To even out the storage they will do it over time
by adding new logs to piles. But they are also doing this in an even distribution.
Once we have placed the logs we need to output the new log count for the lumberjacks to tack up on their cork board.
#Output:
Show the new n x n log piles after placing the logs evenly in the storage area.
Using the example input I would generate the following:
##example output:
3 2 2
2 2 3
2 4 2
Notice we had 6 piles of 1s. Each pile got a log. We still have 1 left. So then we had to place logs in piles of size
2. So the first pile gets the last log and becomes a 3 and we run out of logs and we are done.
#Challenge inputs:
Please solve the challenge using these inputs:
##Input 1:
4
200
15 12 13 11
19 14 8 18
13 14 17 15
7 14 20 7
##Input 2:
15
2048
5 15 20 19 13 16 5 2 20 5 9 15 7 11 13
17 13 7 17 2 17 17 15 4 17 4 14 8 2 1
13 8 5 2 9 8 4 2 2 18 8 12 9 10 14
18 8 13 13 4 4 12 19 3 4 14 17 15 20 8
19 9 15 13 9 9 1 13 14 9 10 20 17 20 3
12 7 19 14 16 2 9 5 13 4 1 17 9 14 19
6 3 1 7 14 3 8 6 4 18 13 16 1 10 3
16 3 4 6 7 17 7 1 10 10 15 8 9 14 6
16 2 10 18 19 11 16 6 17 7 9 13 10 5 11
12 19 12 6 6 9 13 6 13 12 10 1 13 15 14
19 18 17 1 10 3 1 6 14 9 10 17 18 18 7
7 2 10 12 10 20 14 13 19 11 7 18 10 11 12
5 16 6 8 20 17 19 17 14 10 10 1 14 8 12
19 10 15 5 11 6 20 1 5 2 5 10 5 14 14
12 7 15 4 18 11 4 10 20 1 16 18 7 13 15
## Input 3:
1
41
1
## Input 4:
12
10000
9 15 16 18 16 2 20 2 10 12 15 13
20 6 4 15 20 16 13 6 7 12 12 18
11 11 7 12 5 7 2 14 17 18 7 19
7 14 4 19 8 6 4 11 14 13 1 4
3 8 3 12 3 6 15 8 15 2 11 9
16 13 3 9 8 9 8 9 18 13 4 5
6 4 18 1 2 14 8 19 20 11 14 2
4 7 12 8 5 2 19 4 1 10 10 14
7 8 3 11 15 11 2 11 4 17 6 18
19 8 18 18 15 12 20 11 10 9 3 16
3 12 3 3 1 2 9 9 13 11 18 13
9 2 12 18 11 13 18 15 14 20 18 10
#Other Lumberjack Problems:
* [Hard - Simulated Ecology - The
Forest](http://www.reddit.com/r/dailyprogrammer/comments/27h53e/662014_challenge_165_hard_simulated_ecology_the/)
* [Hard - Lumberjack Floating Log Problem]
(http://www.reddit.com/r/dailyprogrammer/comments/2lljyq/11052014_challenge_187_hard_lumberjack_floating/)
"""
def main():
    # demo on the example input from the problem statement above
    piles = [[1, 1, 1], [2, 1, 3], [1, 4, 1]]
    for row in place_logs(3, 7, piles):
        print(" ".join(str(n) for n in row))


if __name__ == "__main__":
    main()
| DayGitH/Python-Challenges | DailyProgrammer/DP20150601A.py | Python | mit | 4,068 |
"""
.. module:: radical.pilot.controller
.. moduleauthor:: Ole Weidner <[email protected]>
"""
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
from unit_manager_controller import UnitManagerController
from pilot_manager_controller import PilotManagerController
from input_file_transfer_worker import InputFileTransferWorker
from output_file_transfer_worker import OutputFileTransferWorker
| JensTimmerman/radical.pilot | src/radical/pilot/controller/__init__.py | Python | mit | 462 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('researchhub', '0009_add_description'),
]
operations = [
migrations.AddField(
model_name='skilledstudent',
name='fields_of_interest',
field=models.CharField(default=b'', help_text=b'In what fields you are interested?', max_length=250),
),
]
| enjaz/enjaz | researchhub/migrations/0010_skilledstudent_fields_of_interest.py | Python | agpl-3.0 | 484 |
from __future__ import absolute_import
import six
import sys
from functools import wraps
from .exceptions import TransactionAborted
from .helpers import can_reconnect
def auto_reconnect_cursor(func):
"""
Attempt to safely reconnect when an error is hit that resembles the
bouncer disconnecting the client due to a timeout/etc during a cursor
execution.
"""
@wraps(func)
def inner(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
except Exception as e:
if not can_reconnect(e):
raise
self.db.close(reconnect=True)
self.cursor = self.db._cursor()
return func(self, *args, **kwargs)
return inner
def auto_reconnect_connection(func):
"""
Attempt to safely reconnect when an error is hit that resembles the
bouncer disconnecting the client due to a timeout/etc.
"""
@wraps(func)
def inner(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
except Exception as e:
if not can_reconnect(e):
raise
self.close(reconnect=True)
return func(self, *args, **kwargs)
return inner
def capture_transaction_exceptions(func):
"""
    Remembers the exception that first aborted a transaction and re-raises
    it when a subsequent statement fails with "current transaction is aborted".
"""
def raise_the_exception(conn, exc):
if 'current transaction is aborted, commands ignored until end of transaction block' in six.text_type(exc):
exc_info = getattr(conn, '_last_exception', None)
if exc_info is None:
raise
new_exc = TransactionAborted(sys.exc_info(), exc_info)
six.reraise(new_exc.__class__, new_exc, exc_info[2])
conn._last_exception = sys.exc_info()
raise
@wraps(func)
def inner(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
except Exception as e:
raise_the_exception(self.db, e)
return inner
def less_shitty_error_messages(func):
"""
    Wraps methods whose first argument (after self) is a SQL statement and
    ensures any exception thrown also contains the statement in its message.
"""
@wraps(func)
def inner(self, sql, *args, **kwargs):
try:
return func(self, sql, *args, **kwargs)
except Exception as e:
exc_info = sys.exc_info()
msg = '{}\nSQL: {}'.format(
repr(e),
sql,
)
six.reraise(exc_info[0], exc_info[0](msg), exc_info[2])
return inner
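

# Minimal usage sketch: the class below is hypothetical and not part of
# sentry. The decorated methods must live on an object exposing `db` (a
# connection wrapper providing close(reconnect=...) and _cursor()) and
# `cursor` (the underlying DB-API cursor).
class _ExampleCursorWrapper(object):
    def __init__(self, db, cursor):
        self.db = db
        self.cursor = cursor

    @capture_transaction_exceptions
    @auto_reconnect_cursor
    @less_shitty_error_messages
    def execute(self, sql, *args, **kwargs):
        # delegate to the real cursor; the decorators add reconnect-and-retry,
        # transaction-abort context, and SQL-annotated error messages
        return self.cursor.execute(sql, *args, **kwargs)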
| JamesMura/sentry | src/sentry/db/postgres/decorators.py | Python | bsd-3-clause | 2,675 |
from django.db import models
from django.conf import settings
from django.utils.encoding import smart_text
class Category(models.Model):
name=models.CharField(max_length=255)
class Meta:
verbose_name_plural = "Categories"
def __str__(self):
return smart_text(self.name)
class PlaceManager(models.Manager):
    # For query helpers we use very often, we create a manager and write the
    # functions inside it so we don't have to rewrite them every time.
def set_wifi_true(self):
return self.get_queryset().update(has_wifi=True)
def get_queryset(self):
gets=super(PlaceManager,self).get_queryset()
return gets.filter(user__is_active=True)
def active_places(self):
return self.get_queryset().filter(
is_active=True,
user__is_active=True
)
class Place(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             related_name="added_places")  # the places this user has added
name=models.CharField(max_length=255)
is_active= models.BooleanField(default=False)
    coordinates = models.CharField(max_length=255, null=True, blank=False)  # null=True: NULL can be stored in the database
    category = models.ForeignKey(Category, blank=True, null=True)  # blank=True: may be left empty in web forms
has_wifi= models.BooleanField(default=False)
telephone=models.CharField(max_length=255,blank=True,null=True)
description = models.TextField(blank=True,null=True)
likes = models.ManyToManyField(settings.AUTH_USER_MODEL,
blank=True,
                                   related_name='liked_places')  # the places this user has liked
objects=PlaceManager()
all_objects=models.Manager()
def __str__(self):
return smart_text(self.name)
@models.permalink
    def get_absolute_url(self):  # so we don't get errors when URL patterns change
return ("place_detail",(self.id,))
def review_count(self):
return self.review_set.count()
class Review(models.Model):
user= models.ForeignKey(settings.AUTH_USER_MODEL)
place = models.ForeignKey(Place)
comment = models.TextField(blank=True,null=True)
vote = models.IntegerField(
default=3,
choices=(
(1,"berbat"),
(2,"kötü"),
(3,"meh"),
(4,"uh"),
(5,"yıkılıyor")
)
)
def __str__(self):
return smart_text(self.comment)
class Media(models.Model):
place = models.ForeignKey(Place)
image = models.ImageField(upload_to="places")
class Meta:
verbose_name_plural = "Media"
def __str__(self):
return smart_text(self.image.url)
| oktayuyar/Velespi | places/models.py | Python | gpl-3.0 | 2,784 |
# draw a stairway
import sys
num_steps_str = sys.argv[1]
if len(num_steps_str) > 0 and num_steps_str.isdigit():
num_steps = int(num_steps_str)
for i in range(1, num_steps + 1):
spaceNum = num_steps - i
print(f"{' '*spaceNum}{'#'*i}")
# for i in range(1, steps + 1):
# space = ' ' * (steps - i)
# bar = '#' * i
# print(f'{space}{bar}')
| Lexa-san/coursera-mailru-mfti-python | src/w01/playground/task02.py | Python | gpl-3.0 | 375 |
# -:- coding: utf-8 -:-#
"""
A resolver to query top-level domains via publicsuffix.org.
"""
from __future__ import absolute_import
NAME = "publicsuffix"
HELP = "a resolver to query top-level domains via publicsuffix.org"
DESC = """
This resolver returns a PTR record pointing to the top-level domain of the
hostname in question. When the --txt option is given, it will also return
additional informational TXT records.
The list of current top-level domains can be explicitly downloaded upon startup
via the --fetch argument.
"""
import dns.message
import logging
import sys
import urllib2
# remove current directory from path to load a module with the same name as us
oldpath, sys.path = sys.path, sys.path[1:]
import publicsuffix
sys.path = oldpath
"""
Module-level configuration
"""
TTL = 14400 # serve all records with this TTL
SERVE_TXT = True # serve additional TXT records
LIST_FETCH = False # download fresh copy of public suffix list
LIST_URL = "http://mxr.mozilla.org/mozilla-central/source/netwerk/dns/effective_tld_names.dat?raw=1"
log = logging.getLogger(__name__)
psl = publicsuffix.PublicSuffixList()
def configure_parser(parser):
"""
Configure provided argparse subparser with module-level options.
Use the set_defaults() construct as a callback for storing the parsed arguments.
"""
def set_defaults(args):
global TTL, SERVE_TXT, LIST_FETCH, LIST_URL
TTL = args.publicsuffix_ttl
SERVE_TXT = args.publicsuffix_txt
if args.publicsuffix_fetch in (True, False):
LIST_FETCH = args.publicsuffix_fetch
else:
LIST_FETCH = True
LIST_URL = args.publicsuffix_fetch
        # download a fresh TLD list; this is a sketch that assumes LIST_URL
        # serves the raw public suffix data and that PublicSuffixList accepts
        # any file-like object of lines (true for the publicsuffix package)
        if LIST_FETCH:
            global psl
            psl = publicsuffix.PublicSuffixList(urllib2.urlopen(LIST_URL))
parser.set_defaults(func=set_defaults)
parser.add_argument("--ttl", dest="publicsuffix_ttl", type=int,
default=TTL, metavar="TTL",
help="TTL to use for all records ")
parser.add_argument("--fetch", dest="publicsuffix_fetch", nargs="?",
default=LIST_FETCH, const=True, metavar="URL",
help="fetch new list on start, from given URL if provided")
parser.add_argument("--notxt", dest="publicsuffix_txt", action="store_false",
default=SERVE_TXT,
help="do not serve additional TXT records")
return parser
def validate(msg):
"""
Filter messages that are bad or we can't handle.
Return a DNS rcode describing the problem.
"""
opcode = msg.opcode()
# we only support queries
if opcode != dns.opcode.QUERY:
return dns.rcode.NOTIMP
# # we do not allow recursion
# if msg.flags & dns.flags.RD:
# return dns.rcode.REFUSED
# only allow single question (qdcount=1)
# @TODO: allow multiple questions?
if len(msg.question) != 1:
return dns.rcode.FORMERR
return dns.rcode.NOERROR
def query(msg):
"""
Return answer to provided DNS question.
Create appropriate skeleton response message via dns.message.make_response(msg).
"""
res = dns.message.make_response(msg)
# validate query
rcode = validate(msg)
res.set_rcode(rcode)
# stop here if didn't validate
if rcode != dns.rcode.NOERROR:
return res
    # in practice this is a single question, but let's not assume that
for query in msg.question:
name = query.name.to_unicode(omit_final_dot=True)
# only deal with PTR queries
if query.rdtype not in (dns.rdatatype.PTR, dns.rdatatype.ANY):
res.set_rcode(dns.rcode.NXDOMAIN)
log.info("Skipping query type %d", query.rdtype)
continue
try:
suffix = psl.get_public_suffix(name)
except:
res.set_rcode(dns.rcode.SERVFAIL)
log.exception("Oddness while looking up suffix")
# don't process further questions since we've set rcode
break
if suffix:
suffix += "."
# answer section
rdata = suffix
# https://github.com/rthalley/dnspython/issues/44
try:
# dnspython3
rrset = dns.rrset.from_text(query.name, TTL,
dns.rdataclass.IN, dns.rdatatype.PTR,
rdata)
except AttributeError:
# dnspython2
rrset = dns.rrset.from_text(query.name, TTL,
dns.rdataclass.IN, dns.rdatatype.PTR,
rdata.encode("idna"))
res.answer.append(rrset)
if SERVE_TXT:
# additional section
tld = query.name.split(2)[-1].to_text(omit_final_dot=True)
rdata = '"see: http://en.wikipedia.org/wiki/.{}"'.format(tld)
# https://github.com/rthalley/dnspython/issues/44
try:
# python3
rrset = dns.rrset.from_text(suffix, TTL,
dns.rdataclass.IN, dns.rdatatype.TXT,
rdata)
except:
# python2
rrset = dns.rrset.from_text(suffix, TTL,
dns.rdataclass.IN, dns.rdatatype.TXT,
rdata.encode("latin1"))
res.additional.append(rrset)
return res
| skion/junkdns | src/resolvers/publicsuffix.py | Python | mit | 5,429 |
"""
Cloudbrain's OO data model.
"""
class MetricBuffer(object):
def __init__(self, name, num_channels, buffer_size):
self.name = name
self.num_channels = num_channels
self.metric_names = ["channel_%s" % i for i in range(self.num_channels)]
self.metric_names.append("timestamp")
self.buffer_size = buffer_size
self.data_buffer = []
def _validate_datum(self, datum):
"""
        Validate that the datum being sent follows the expected schema.
        :param datum: MetricBuffer data point. E.g.:
          {"timestamp": <float>, "channel_0": <float>, ..., "channel_N": <float>}
:type datum: dict
"""
if sorted(datum.keys()) != sorted(self.metric_names):
raise ValueError("MetricBuffer keys should be %s but are %s" % (
self.metric_names, datum.keys()))
def add(self, datum):
"""
Append datum to the buffer.
:param datum: metric data point with the following format:
{"timestamp": <float>, "metric_0": <float>, ..., "metric_7": <float>}
:type datum: dict
:returns: (list of dicts) 'None' if the buffer isn't full yet.
A list of dicts otherwise. E.g:
[
{"timestamp": <float>, "metric_0": <float>, ..., "metric_7": <float>},
...
{"timestamp": <float>, "metric_0": <float>, ..., "metric_7": <float>}
]
:rtype: list of dicts
"""
self._validate_datum(datum)
self.data_buffer.append(datum)
if len(self.data_buffer) >= self.buffer_size:
data_buffer = self.data_buffer
self.data_buffer = []
return data_buffer
else:
return None
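

if __name__ == "__main__":
    # Minimal usage sketch (not part of the library): a 2-channel buffer
    # that flushes every 3 data points. Keys must match the schema built in
    # __init__, i.e. "channel_<i>" plus "timestamp".
    buf = MetricBuffer(name="eeg", num_channels=2, buffer_size=3)
    for t in range(7):
        flushed = buf.add({"timestamp": float(t),
                           "channel_0": 0.1, "channel_1": 0.2})
        if flushed is not None:
            print("flushed %d data points" % len(flushed))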
| marionleborgne/cloudbrain | src/cloudbrain/core/model.py | Python | agpl-3.0 | 1,620 |
from mamba import description, context, before, it
from expects import expect, equal, contain
from doublex_expects import have_been_called_with
from doublex import Spy
from spec.object_mother import *
from mamba import reporter, formatters, example_group
with description(reporter.Reporter) as self:
with before.each:
self.example = an_example()
self.formatter = Spy(formatters.Formatter)
self.reporter = reporter.Reporter(self.formatter)
self.reporter.start()
with context('when event started'):
with before.each:
self.reporter.example_started(self.example)
with it('notifies event example started to listeners'):
expect(self.formatter.example_started).to(have_been_called_with(self.example))
with it('increases example counter'):
expect(self.reporter.example_count).to(equal(1))
with context('when event passed'):
with it('notifies event example passed to listeners'):
self.reporter.example_passed(self.example)
expect(self.formatter.example_passed).to(have_been_called_with(self.example))
with context('when event failed'):
with before.each:
self.reporter.example_failed(self.example)
with it('notifies event example failed to listeners'):
expect(self.formatter.example_failed).to(have_been_called_with(self.example))
with it('increases failed counter'):
expect(self.reporter.failed_count).to(equal(1))
with it('keeps failed example'):
self.reporter.example_failed(self.example)
expect(self.reporter.failed_examples).to(contain(self.example))
with context('when event pending'):
with it('notifies event pending to listeners'):
self.reporter.example_pending(self.example)
expect(self.formatter.example_pending).to(have_been_called_with(self.example))
with it('increases pending counter when example started'):
self.reporter.example_pending(self.example)
expect(self.reporter.pending_count).to(equal(1))
with context('when reporting events for an example group'):
with before.each:
self.example_group = an_example_group()
with it('notifies event example group started to listeners'):
self.reporter.example_group_started(self.example_group)
expect(self.formatter.example_group_started).to(have_been_called_with(self.example_group))
with it('notifies event example group finished to listeners'):
self.reporter.example_group_finished(self.example_group)
expect(self.formatter.example_group_finished).to(have_been_called_with(self.example_group))
        with it('notifies event example group pending to listeners'):
self.example_group = a_pending_example_group()
self.reporter.example_group_pending(self.example_group)
expect(self.formatter.example_group_pending).to(have_been_called_with(self.example_group))
with context('when finishing'):
with it('notifies summary to listeners'):
self.reporter.finish()
expect(self.formatter.summary).to(have_been_called_with(
self.reporter.duration,
self.reporter.example_count,
self.reporter.failed_count,
self.reporter.pending_count))
with it('notifies failed examples to listeners'):
self.reporter.finish()
expect(self.formatter.failures).to(have_been_called_with(self.reporter.failed_examples))
| nestorsalceda/mamba | spec/reporter_spec.py | Python | mit | 3,622 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import rasa_core.version
logging.getLogger(__name__).addHandler(logging.NullHandler())
__version__ = rasa_core.version.__version__
| deepak02/rasa_core | rasa_core/__init__.py | Python | apache-2.0 | 299 |
import random
from test import *
from branch import *
INSN = 'and eor sub rsb add adc sbc rsc tst teq cmp cmn orr mov bic mvn'.split()
NORN = 'mov mvn'.split()
NORD = 'cmp cmn tst teq'.split()
def rotate(val, c):
return ((val >> c) | (val << (32 - c))) & 0xffffffff
def test(insn, s, flags, rd, rn, rnval, imm8, rot):
name = 'test_dp_imm_%s' % tn()
cleanup = asm_wrap(name, rd, {rn:rnval}, flags)
print '%s_tinsn:' % name
if 1:
if insn in NORN:
print ' %s%s %s, #%i, %i' % (insn, s, rd, imm8, rot)
elif insn in NORD:
print ' %s %s, #%i, %i' % (insn, rn, imm8, rot)
else:
print ' %s%s %s, %s, #%i, %i' % (insn, s, rd, rn, imm8, rot)
else:
v = rotate(imm8, rot)
if insn in NORN:
print ' %s%s %s, #%i // %x ror %i ' % (insn, s, rd, v, imm8, rot)
elif insn in NORD:
print ' %s %s, #%i // %x ror %i ' % (insn, rn, v, imm8, rot)
else:
print ' %s%s %s, %s, #%i // %x ror %i ' % (insn, s, rd, rn, v, imm8, rot)
cleanup()
def iter_cases():
while True:
yield (random.choice(INSN), random.choice(['s', '']),
random.randint(0, 0x1f), random.choice(T32REGS),
random.choice(ALLREGS), random.randint(0, 0xffffffff),
random.randint(0, 0xff), random.randint(0, 0xf) * 2)
print ' .arm'
tests(test, iter_cases(), 300)
| Samsung/ADBI | arch/arm/tests/arm_dp_imm.py | Python | apache-2.0 | 1,486 |
from exporters.logger.base_logger import TransformLogger
from exporters.pipeline.base_pipeline_item import BasePipelineItem
class BaseTransform(BasePipelineItem):
"""
    This module receives a batch, transforms it, and returns it. Concrete
    subclasses must implement transform_batch().
"""
def __init__(self, options, metadata=None):
super(BaseTransform, self).__init__(options, metadata)
self.logger = TransformLogger({
'log_level': options.get('log_level'),
'logger_name': options.get('logger_name')
})
def transform_batch(self, batch):
"""
Receives the batch, transforms it, and returns it.
"""
raise NotImplementedError
def set_metadata(self, key, value, module='transform'):
super(BaseTransform, self).set_metadata(key, value, module)
def update_metadata(self, data, module='transform'):
super(BaseTransform, self).update_metadata(data, module)
def get_metadata(self, key, module='transform'):
return super(BaseTransform, self).get_metadata(key, module)
def get_all_metadata(self, module='transform'):
return super(BaseTransform, self).get_all_metadata(module)
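

class ExampleTitleUppercaseTransform(BaseTransform):
    """A minimal sketch of a concrete transform, not shipped with exporters;
    it assumes batch items are dicts with an optional 'title' key."""

    def transform_batch(self, batch):
        for item in batch:
            if 'title' in item:
                # mutate in place and pass every item downstream
                item['title'] = item['title'].upper()
            yield item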
| scrapinghub/exporters | exporters/transform/base_transform.py | Python | bsd-3-clause | 1,200 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2013-03-19
@author: Martin H. Bramwell
'''
| martinhbramwell/GData_OpenERP_Data_Pump | models/__init__.py | Python | agpl-3.0 | 106 |
#!/usr/bin/python -O
# This file is part of ranger, the console file manager. (coding: utf-8)
# License: GNU GPL version 3, see the file "AUTHORS" for details.
# =====================
# This embedded bash script can be executed by sourcing this file.
# It will cd to ranger's last location after you exit it.
# The first argument specifies the command to run ranger, the
# default is simply "ranger". (Not this file itself!)
# The other arguments are passed to ranger.
"""":
tempfile="$(mktemp -t tmp.XXXXXX)"
ranger="${1:-ranger}"
test -z "$1" || shift
"$ranger" --choosedir="$tempfile" "${@:-$(pwd)}"
returnvalue=$?
test -f "$tempfile" &&
if [ "$(cat -- "$tempfile")" != "$(echo -n `pwd`)" ]; then
cd "$(cat "$tempfile")"
fi
rm -f -- "$tempfile"
return $returnvalue
"""
from __future__ import (absolute_import, division, print_function)
import sys
# Need to find out whether or not the flag --clean was used ASAP,
# because --clean is supposed to disable bytecode compilation
ARGV = sys.argv[1:sys.argv.index('--')] if '--' in sys.argv else sys.argv[1:]
sys.dont_write_bytecode = '-c' in ARGV or '--clean' in ARGV
# Start ranger
import ranger # NOQA pylint: disable=import-self,wrong-import-position
sys.exit(ranger.main()) # pylint: disable=no-member
| Vifon/ranger | ranger.py | Python | gpl-3.0 | 1,266 |
# import sys
# import unittest
# from asq.queryables import Queryable, identity
# from asq.test.test_queryable import times, inc_chr, times_two
#
# if not sys.platform == 'cli':
#
#
# class TestParallelQueryable(unittest.TestCase):
#
# def test_parallel_select(self):
# a = [27, 74, 18, 48, 57, 97, 76, 20, 91, 8, 80, 59, 20, 32, 58, 12, 74, 78, 4]
# with Queryable(a) as q:
# b = q.as_parallel().select(times_two).to_list()
# c = [54, 148, 36, 96, 114, 194, 152, 40, 182, 16, 160, 118, 40, 64, 116, 24, 148, 156, 8]
# self.assertEqual(len(b), len(c))
# self.assertEqual(set(b), set(c))
#
# def test_parallel_select_with_index_finite(self):
# a = [27, 74, 18, 48, 57, 97, 76, 20, 91, 8, 80, 59, 20, 32, 58, 12, 74, 78, 4]
# with Queryable(a) as q:
# b = q.as_parallel().select_with_index(times).to_list()
# c = [0, 74, 36, 144, 228, 485, 456, 140, 728, 72, 800, 649, 240, 416, 812, 180, 1184, 1326, 72]
# self.assertEqual(len(b), len(c))
# self.assertEqual(set(b), set(c))
#
# def test_parallel_select_many_projector_finite(self):
# a = ['fox', 'kangaroo', 'bison', 'bear']
# with Queryable(a) as q:
# b = q.as_parallel().select_many(identity).to_list()
# c = ['f', 'o', 'x', 'k', 'a', 'n', 'g', 'a', 'r', 'o', 'o', 'b', 'i', 's', 'o', 'n', 'b', 'e', 'a', 'r']
# self.assertEqual(sorted(b), sorted(c))
#
# def test_parallel_select_many_projector_selector_finite(self):
# a = ['fox', 'kangaroo', 'bison', 'bear']
# with Queryable(a) as q:
# b = q.as_parallel().select_many(identity, inc_chr).to_list()
# c = ['g', 'p', 'y', 'l', 'b', 'o', 'h', 'b', 's', 'p', 'p', 'c', 'j', 't', 'p', 'o', 'c', 'f', 'b', 's']
# self.assertEqual(len(b), len(c))
# self.assertEqual(set(b), set(c))
#
# def test_parallel_order_by(self):
# a = [27, 74, 18, 48, 57, 97, 76, 20, 91, 8, 80, 59, 20, 32, 58, 12, 74, 78, 4]
# with Queryable(a) as q:
# b = q.as_parallel().order_by().to_list()
# c = [4, 8, 12, 18, 20, 20, 27, 32, 48, 57, 58, 59, 74, 74, 76, 78, 80, 91, 97]
# self.assertEqual(b, c)
#
# #def test_parallel_order_by(self):
# # with Queryable(randgen()) as q:
# # a = q.as_parallel().take(1000000).order_by().to_list
#
#
# if __name__ == '__main__':
# suite = unittest.TestLoader().loadTestsFromTestCase(TestParallelQueryable)
# unittest.TextTestRunner(verbosity=2).run(suite)
| rob-smallshire/asq | asq/test/test_parallel_queryable.py | Python | mit | 2,706 |
import os
import shutil
import logging
from fuzzer import Showmap
l = logging.getLogger("grease_callback")
class GreaseCallback(object):
def __init__(self, grease_dir, grease_filter=None, grease_sorter=None):
self._grease_dir = grease_dir
assert os.path.exists(grease_dir)
self._grease_filter = grease_filter if grease_filter is not None else lambda x: True
self._grease_sorter = grease_sorter if grease_sorter is not None else lambda x: x
def grease_callback(self, fuzz):
l.warning("we are stuck, trying to grease the wheels!")
# find an unused input
grease_inputs = [
os.path.join(self._grease_dir, x) for x in os.listdir(self._grease_dir)
if self._grease_filter(os.path.join(self._grease_dir, x))
]
if len(grease_inputs) == 0:
l.warning("no grease inputs remaining")
return
# iterate until we find one with a new bitmap
bitmap = fuzz.bitmap()
for a in self._grease_sorter(grease_inputs):
if os.path.getsize(a) == 0:
continue
with open(a) as sf:
seed_content = sf.read()
smap = Showmap(fuzz.binary_path, seed_content)
shownmap = smap.showmap()
for k in shownmap:
#print(shownmap[k], (ord(bitmap[k % len(bitmap)]) ^ 0xff))
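                    # the fuzzer bitmap stores coverage inverted (0xff means
                    # untouched), so XOR with 0xff gives the hit count seen so
                    # far; a strictly larger showmap value means new coverage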
if shownmap[k] > (ord(bitmap[k % len(bitmap)]) ^ 0xff):
l.warning("Found interesting, syncing to tests")
fuzzer_out_dir = fuzz.out_dir
grease_dir = os.path.join(fuzzer_out_dir, "grease")
grease_queue_dir = os.path.join(grease_dir, "queue")
try:
os.mkdir(grease_dir)
os.mkdir(grease_queue_dir)
except OSError:
pass
id_num = len(os.listdir(grease_queue_dir))
filepath = "id:" + ("%d" % id_num).rjust(6, "0") + ",grease"
filepath = os.path.join(grease_queue_dir, filepath)
shutil.copy(a, filepath)
l.warning("copied grease input: %s", os.path.basename(a))
return
l.warning("No interesting inputs found")
__call__ = grease_callback
| shellphish/fuzzer | fuzzer/extensions/grease_callback.py | Python | bsd-2-clause | 2,360 |
from flask import Blueprint
user_module = Blueprint(
"user",
__name__,
url_prefix = "",
template_folder = "templates",
static_folder = "static"
)
from . import views
| hackBCA/missioncontrol | application/mod_user/__init__.py | Python | mit | 188 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import generator
class PlccFootprint(generator.Footprint):
NUMBERING_REGULAR, NUMBERING_REVERSED = range(0, 2)
def __init__(self, name, count, size, space, numberStyle, body, style, description):
generator.Footprint.__init__(self, name=name, description=description)
self.count = count
self.size = size #Pad width, height
self.space = space #Horizontal and vertical spaces between pads
self.numberStyle = numberStyle
self.body = body
self.thickness = style[0]
self.gap = style[2]
self.dotRadius = self.thickness / 2.
self.label = generator.Label(name=name, position=(0.0, 0.0), thickness=self.thickness, font=style[1])
self.generate()
def generate(self):
count = self.count / 2
offset = self.space[0] / 2. if count % 2 == 0 else 0.
borders = (self.body[0] / 2., self.body[1] / 2.)
minimalBorderX = (count / 2) * self.space[0] + self.size[0] / 2. - offset + self.thickness / 2. + self.gap
if minimalBorderX > borders[0]:
borders = (minimalBorderX, borders[1])
if self.numberStyle == PlccFootprint.NUMBERING_REGULAR:
for pin in range(0, count):
index = pin - count / 2
x = index * self.space[0] + offset
self.pads.append(generator.SmdPad(1 + pin, self.size, (x, self.space[1])))
self.pads.append(generator.SmdPad(self.count - pin, self.size, (x, -self.space[1])))
elif self.numberStyle == PlccFootprint.NUMBERING_REVERSED:
if self.count != 4:
                raise Exception("reversed numbering is only defined for 4-pad packages")
#Fixed pin numbers
self.pads.append(generator.SmdPad(1, self.size, (-self.space[0] / 2., self.space[1])))
self.pads.append(generator.SmdPad(2, self.size, (-self.space[0] / 2., -self.space[1])))
self.pads.append(generator.SmdPad(3, self.size, (self.space[0] / 2., -self.space[1])))
self.pads.append(generator.SmdPad(4, self.size, (self.space[0] / 2., self.space[1])))
self.lines = []
self.lines.append(generator.Line((borders[0], borders[1]), (-borders[0], borders[1]), self.thickness))
self.lines.append(generator.Line((borders[0], -borders[1]), (-borders[0], -borders[1]), self.thickness))
self.lines.append(generator.Line((borders[0], borders[1]), (borders[0], -borders[1]), self.thickness))
self.lines.append(generator.Line((-borders[0], borders[1]), (-borders[0], -borders[1]), self.thickness))
dotMarkOffset = (-((count / 2) * self.space[0] - offset), self.space[1] + self.size[1] / 2. + self.gap\
+ self.dotRadius + self.thickness / 2.)
self.circles.append(generator.Circle(dotMarkOffset, self.dotRadius, self.thickness))
processFunc = lambda x: generator.collide_line(x, self.pads, self.thickness, self.gap)
processedLines = map(processFunc, self.lines)
self.lines = []
map(self.lines.extend, processedLines)
class Autogen:
STYLE_THIN, STYLE_THICK = (0.16, 0.82, 0.18), (0.2, 1.0, 0.25)
def __init__(self, modelType="wrl", isNew=False, path=None):
self.parts = []
if isNew:
self.converter = generator.NewConverter("opto/", path, "opto", modelType)
else:
self.converter = generator.OldConverter("opto/")
#Based on specifications from chinese LED
self.parts.append(PlccFootprint(name="LED3528-PLCC4", count=4, size=(0.9, 1.3), space=(1.7, 1.4),
numberStyle=PlccFootprint.NUMBERING_REVERSED, body=(2.8, 3.2), style=Autogen.STYLE_THIN,
description=""))
self.parts.append(PlccFootprint(name="LED3528-PLCC6", count=6, size=(0.6, 1.2), space=(0.95, 1.5),
numberStyle=PlccFootprint.NUMBERING_REGULAR, body=(2.8, 3.2), style=Autogen.STYLE_THIN,
description=""))
self.parts.append(PlccFootprint(name="LED5050-PLCC6", count=6, size=(1.2, 2.1), space=(1.55, 2.1),
numberStyle=PlccFootprint.NUMBERING_REGULAR, body=(5.0, 5.0), style=Autogen.STYLE_THICK,
description=""))
self.parts.append(PlccFootprint(name="LED6050-PLCC6", count=6, size=(1.5, 2.1), space=(2.1, 2.1),
numberStyle=PlccFootprint.NUMBERING_REGULAR, body=(6.0, 5.0), style=Autogen.STYLE_THICK,
description=""))
self.parts.sort(key=lambda x: x.name)
def text(self):
return self.converter.generateDocument(self.parts)
parser = argparse.ArgumentParser()
parser.add_argument("-f", dest="format", help="output file format", default="wrl")
parser.add_argument("-o", dest="output", help="write footprints to specified directory", default=None)
parser.add_argument("-p", dest="pretty", help="use S-Expression format", default=False, action="store_true")
options = parser.parse_args()
ag = Autogen(options.format, options.pretty, options.output)
print ag.text()
| stxent/kmodgen | footprints/back/generator_opto.py | Python | gpl-3.0 | 5,035 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send VLAG commands to Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_vlag
author: "Anil Kumar Muraleedharan (@amuraleedhar)"
short_description: Manage VLAG resources and attributes on devices running Lenovo CNOS
description:
- This module allows you to work with virtual Link Aggregation Groups
(vLAG) related configurations. The operators used are overloaded to ensure
control over switch vLAG configurations. Apart from the regular device
connection related attributes, there are four vLAG arguments which are
overloaded variables that will perform further configurations. They are
vlagArg1, vlagArg2, vlagArg3, and vlagArg4. For more details on how to use
these arguments, see [Overloaded Variables].
This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
that must be created by the user in their local directory to where the playbook is run.
For more information about this module from Lenovo and customizing it usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_vlag.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options:
vlagArg1:
description:
            - This is an overloaded vlag first argument. Usage of this argument can be found in the User Guide referenced above.
required: Yes
default: Null
choices: [enable, auto-recovery,config-consistency,isl,mac-address-table,peer-gateway,priority,startup-delay,tier-id,vrrp,instance,hlthchk]
vlagArg2:
description:
            - This is an overloaded vlag second argument. Usage of this argument can be found in the User Guide referenced above.
required: No
default: Null
choices: [Interval in seconds,disable or strict,Port Aggregation Number,VLAG priority,Delay time in seconds,VLAG tier-id value,
VLAG instance number,keepalive-attempts,keepalive-interval,retry-interval,peer-ip]
vlagArg3:
description:
            - This is an overloaded vlag third argument. Usage of this argument can be found in the User Guide referenced above.
required: No
default: Null
choices: [enable or port-aggregation,Number of keepalive attempts,Interval in seconds,Interval in seconds,VLAG health check peer IP4 address]
vlagArg4:
description:
            - This is an overloaded vlag fourth argument. Usage of this argument can be found in the User Guide referenced above.
required: No
default: Null
choices: [Port Aggregation Number,default or management]
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_vlag. These are written in the main.yml file of the tasks directory.
---
- name: Test Vlag - enable
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "enable"
- name: Test Vlag - autorecovery
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "auto-recovery"
vlagArg2: 266
- name: Test Vlag - config-consistency
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "config-consistency"
vlagArg2: "strict"
- name: Test Vlag - isl
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "isl"
vlagArg2: 23
- name: Test Vlag - mac-address-table
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "mac-address-table"
- name: Test Vlag - peer-gateway
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "peer-gateway"
- name: Test Vlag - priority
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "priority"
vlagArg2: 1313
- name: Test Vlag - startup-delay
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "startup-delay"
vlagArg2: 323
- name: Test Vlag - tier-id
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "tier-id"
vlagArg2: 313
- name: Test Vlag - vrrp
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "vrrp"
- name: Test Vlag - instance
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "instance"
vlagArg2: 33
vlagArg3: 333
- name: Test Vlag - instance2
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "instance"
vlagArg2: "33"
- name: Test Vlag - keepalive-attempts
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "keepalive-attempts"
vlagArg3: 13
- name: Test Vlag - keepalive-interval
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "keepalive-interval"
vlagArg3: 131
- name: Test Vlag - retry-interval
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "retry-interval"
vlagArg3: 133
- name: Test Vlag - peer ip
cnos_vlag:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['ansible_ssh_user']}}"
password: "{{ hostvars[inventory_hostname]['ansible_ssh_pass']}}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType']}}"
outputfile: "./results/cnos_vlag_{{ inventory_hostname }}_output.txt"
vlagArg1: "hlthchk"
vlagArg2: "peer-ip"
vlagArg3: "1.2.3.4"
'''
RETURN = '''
msg:
description: Success or failure message
returned: always
type: string
sample: "vLAG configurations accomplished"
'''
import sys
import time
import socket
import array
import json
import re
try:
from ansible.module_utils.network.cnos import cnos
HAS_LIB = True
except ImportError:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
#
# Define parameters for vlag creation entry
#
module = AnsibleModule(
argument_spec=dict(
outputfile=dict(required=True),
host=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
enablePassword=dict(required=False, no_log=True),
deviceType=dict(required=True),
vlagArg1=dict(required=True),
vlagArg2=dict(required=False),
vlagArg3=dict(required=False),
vlagArg4=dict(required=False),),
supports_check_mode=False)
outputfile = module.params['outputfile']
output = ""
# Send the CLi command
output = output + str(cnos.vlagConfig(module, '(config)#', None))
# Save it into the file
file = open(outputfile, "a")
file.write(output)
file.close()
# need to add logic to check when changes occur or not
errorMsg = cnos.checkOutputForError(output)
if(errorMsg is None):
module.exit_json(changed=True, msg="VLAG configurations accomplished")
else:
module.fail_json(msg=errorMsg)
if __name__ == '__main__':
main()
| hryamzik/ansible | lib/ansible/modules/network/cnos/cnos_vlag.py | Python | gpl-3.0 | 12,192 |
"""Test when and then steps are callables."""
import pytest
from pytest_bdd import given, when, then
@when('I do stuff')
def do_stuff():
pass
@then('I check stuff')
def check_stuff():
pass
def test_when_then(request):
"""Test when and then steps are callable functions.
This test checks that when and then are not evaluated
during fixture collection that might break the scenario.
"""
do_stuff_ = request.getfuncargvalue('I do stuff')
assert callable(do_stuff_)
check_stuff_ = request.getfuncargvalue('I check stuff')
assert callable(check_stuff_)
@pytest.mark.parametrize(
('step', 'keyword'), [
(given, 'Given'),
(when, 'When'),
(then, 'Then')])
def test_preserve_decorator(step, keyword):
"""Check that we preserve original function attributes after decorating it."""
@step(keyword)
def func():
"""Doc string."""
assert globals()[keyword].__doc__ == 'Doc string.'
| curzona/pytest-bdd | tests/steps/test_steps.py | Python | mit | 971 |
import angr
from . import io_file_data_for_arch
######################################
# fopen
######################################
def mode_to_flag(mode):
# TODO improve this: handle mode = strings
if mode[-1] == 'b': # lol who uses windows
mode = mode[:-1]
all_modes = {
"r" : angr.storage.file.Flags.O_RDONLY,
"r+" : angr.storage.file.Flags.O_RDWR,
"w" : angr.storage.file.Flags.O_WRTONLY | angr.storage.file.Flags.O_CREAT,
"w+" : angr.storage.file.Flags.O_RDWR | angr.storage.file.Flags.O_CREAT,
"a" : angr.storage.file.Flags.O_WRTONLY | angr.storage.file.Flags.O_CREAT | angr.storage.file.Flags.O_APPEND,
"a+" : angr.storage.file.Flags.O_RDWR | angr.storage.file.Flags.O_CREAT | angr.storage.file.Flags.O_APPEND
}
if mode not in all_modes:
raise angr.SimProcedureError('unsupported file open mode %s' % mode)
return all_modes[mode]
class fopen(angr.SimProcedure):
#pylint:disable=arguments-differ
def run(self, p_addr, m_addr):
strlen = angr.SIM_PROCEDURES['libc']['strlen']
p_strlen = self.inline_call(strlen, p_addr)
m_strlen = self.inline_call(strlen, m_addr)
p_expr = self.state.memory.load(p_addr, p_strlen.max_null_index, endness='Iend_BE')
m_expr = self.state.memory.load(m_addr, m_strlen.max_null_index, endness='Iend_BE')
path = self.state.se.eval(p_expr, cast_to=str)
mode = self.state.se.eval(m_expr, cast_to=str)
# TODO: handle append
fd = self.state.posix.open(path, mode_to_flag(mode))
if fd == -1:
# if open failed return NULL
return 0
else:
# Allocate a FILE struct in heap
malloc = angr.SIM_PROCEDURES['libc']['malloc']
io_file_data = io_file_data_for_arch(self.state.arch)
file_struct_ptr = self.inline_call(malloc, io_file_data['size']).ret_expr
# Write the fd
fd_bvv = self.state.se.BVV(fd, 4 * 8) # int
self.state.memory.store(file_struct_ptr + io_file_data['fd'],
fd_bvv,
endness=self.state.arch.memory_endness)
return file_struct_ptr
| f-prettyland/angr | angr/procedures/libc/fopen.py | Python | bsd-2-clause | 2,258 |
class PathError(Exception):
"""
parent class for all routing related errors
"""
pass
class MissingNodeError(Exception):
"""
a referenced node is missing in the graph
"""
def __init__(self, node):
self.node = node
def __str__(self):
return "Node %d in not in the graph" % self.node
class NotEnoughNodesError(Exception):
def __str__(self):
return "need at least two nodes to join something"
class RoutingError(Exception):
"""
    there is no connection between two nodes
"""
def __init__(self, node_start, node_end):
self.node_start = node_start
self.node_end = node_end
def __str__(self):
return "No connection between node %d and node %d" % (self.node_start, self.node_end)
| geops/pg_routejoin | routejoin/common.py | Python | mit | 787 |
# Copyright 2016-2018 Peppy Player [email protected]
#
# This file is part of Peppy Player.
#
# Peppy Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Peppy Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Peppy Player. If not, see <http://www.gnu.org/licenses/>.
import math
from websiteparser.loyalbooks.newsparser import NewsParser
from websiteparser.loyalbooks.genreparser import GenreParser
from websiteparser.loyalbooks.languageparser import LanguageParser
from websiteparser.loyalbooks.bookparser import BookParser
from websiteparser.siteparser import TOTAL_PAGES, BOOK_SUMMARIES
from websiteparser.loyalbooks.constants import PAGE_PREFIX, TOP_100, BASE_URL, \
RESULTS_100, BOOKS_PAGE_SIZE
class LoyalBooksParser():
""" Parser for loyalbooks site """
def __init__(self):
""" Initializer """
self.news_parser = NewsParser()
self.genre_books_parser = GenreParser()
self.book_parser = BookParser()
self.language_parser = LanguageParser()
def get_books(self, page_num, language_url=""):
""" Return new books
:param page_num: page number
:param language_url: language constant
:return: books
"""
        # keep a normalized copy of the language URL; it is reused below
        # when working out the real page count
        self.language_url = language_url.strip() if language_url else ""
        if len(self.language_url) == 0:
            p = self.news_parser
        else:
            p = self.language_parser
p.parse(page_num - 1)
books = self.create_books_object(p)
if len(self.language_url) == 0:
tmp_parser = NewsParser()
else:
tmp_parser = LanguageParser()
tmp_parser.page_url_prefix = TOP_100
num = int(p.total_pages / BOOKS_PAGE_SIZE)
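        # the site only reports an approximate page count; fetch the last
        # 100-results page once to count the leftover items, then cache it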
if p.site_total_pages == 0:
if len(self.language_url) == 0:
tmp_parser.url = BASE_URL + tmp_parser.page_url_prefix + str(num)
else:
tmp_parser.url = BASE_URL + self.language_url[0 : -1] + RESULTS_100 + PAGE_PREFIX + str(num)
p.total_pages -= 1
tmp_parser.parse_page(cache_books=False)
self.set_total_pages(p, tmp_parser, books)
p.site_total_pages = books[TOTAL_PAGES]
else:
books[TOTAL_PAGES] = p.site_total_pages
return books
def create_books_object(self, parser):
""" Prepare books object
:param parser: parser
:return: book object
"""
books = dict()
books[TOTAL_PAGES] = parser.total_pages
books[BOOK_SUMMARIES] = parser.items
return books
def get_book_audio_files_by_url(self, url, img_url):
""" Get the list of audio files for defined url
:param url: book url
:return: list of audio files
"""
self.book_parser.url = url
self.book_parser.parse()
return self.book_parser.playlist
def get_books_by_genre(self, genre, page_num):
""" Get the list of genre books
:param genre: genre name
:param page_num: page number
:return: list of genre books
"""
p = self.genre_books_parser
p.parse(page_num - 1, genre)
books = self.create_books_object(p)
num = int(p.total_pages / BOOKS_PAGE_SIZE)
tmp_parser = GenreParser()
if p.site_total_pages == 0:
tmp_parser.url = p.genre_url + RESULTS_100 + PAGE_PREFIX + str(num)
tmp_parser.parse_page(cache_books=False)
reminder = tmp_parser.items_num
n = (num - 1) * 100 + reminder
books[TOTAL_PAGES] = (math.ceil(n / BOOKS_PAGE_SIZE))
p.site_total_pages = books[TOTAL_PAGES]
else:
books[TOTAL_PAGES] = p.site_total_pages
return books
def set_total_pages(self, prev_parser, tmp_parser, books):
""" Set the real number of pages on books object
:param prev_parser: previous parser
:param tmp_parser: temporary parser
:param books: books object
"""
reminder = tmp_parser.items_num
site_num = int(prev_parser.total_pages / BOOKS_PAGE_SIZE)
total_books = site_num * 100 + reminder
books[TOTAL_PAGES] = (math.ceil(total_books / BOOKS_PAGE_SIZE))
| project-owner/Peppy | websiteparser/loyalbooks/loyalbooksparser.py | Python | gpl-3.0 | 4,833 |
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.Console import Console
from Screens.Standby import TryQuitMainloop
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Pixmap import Pixmap
from Tools.LoadPixmap import LoadPixmap
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Components.Sources.List import List
from Components.Button import Button
from Components.config import getConfigListEntry, configfile, ConfigSelection, ConfigSubsection, ConfigText, ConfigLocations
from Components.config import config
from Components.ConfigList import ConfigList,ConfigListScreen
from Components.FileList import MultiFileSelectList
from Components.Network import iNetwork
from Plugins.Plugin import PluginDescriptor
from enigma import eTimer, eEnv, eConsoleAppContainer, eEPGCache
from Tools.Directories import *
from os import system, popen, path, makedirs, listdir, access, stat, rename, remove, W_OK, R_OK
from time import gmtime, strftime, localtime, sleep
from datetime import date
from boxbranding import getBoxType, getMachineBrand, getMachineName
boxtype = getBoxType()
config.plugins.configurationbackup = ConfigSubsection()
if boxtype in ('maram9', 'classm', 'axodin', 'axodinc', 'starsatlx', 'genius', 'evo', 'galaxym6') and not path.exists("/media/hdd/backup_%s" %boxtype):
config.plugins.configurationbackup.backuplocation = ConfigText(default = '/media/backup/', visible_width = 50, fixed_size = False)
else:
config.plugins.configurationbackup.backuplocation = ConfigText(default = '/media/hdd/', visible_width = 50, fixed_size = False)
config.plugins.configurationbackup.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), '/etc/CCcam.cfg', '/usr/keys/',
'/etc/network/interfaces', '/etc/wpa_supplicant.conf', '/etc/wpa_supplicant.ath0.conf',
'/etc/wpa_supplicant.wlan0.conf', '/etc/resolv.conf', '/etc/default_gw', '/etc/hostname',
eEnv.resolve("${datadir}/enigma2/keymap.usr")])
def getBackupPath():
backuppath = config.plugins.configurationbackup.backuplocation.value
if backuppath.endswith('/'):
return backuppath + 'backup_' + boxtype
else:
return backuppath + '/backup_' + boxtype
def getOldBackupPath():
backuppath = config.plugins.configurationbackup.backuplocation.value
if backuppath.endswith('/'):
return backuppath + 'backup'
else:
return backuppath + '/backup'
def getBackupFilename():
return "enigma2settingsbackup.tar.gz"
def SettingsEntry(name, checked):
if checked:
		picture = LoadPixmap(cached = True, path = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/lock_on.png"))
else:
		picture = LoadPixmap(cached = True, path = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/lock_off.png"))
return (name, picture, checked)
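# Illustrative result (plugin name assumed): SettingsEntry('enigma2-plugin-extensions-foo', True)
# returns ('enigma2-plugin-extensions-foo', <lock_on.png pixmap>, True).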
class BackupScreen(Screen, ConfigListScreen):
skin = """
<screen position="135,144" size="350,310" title="Backup is running" >
<widget name="config" position="10,10" size="330,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, runBackup = False):
Screen.__init__(self, session)
self.session = session
self.runBackup = runBackup
self["actions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"ok": self.close,
"back": self.close,
"cancel": self.close,
}, -1)
self.finished_cb = None
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + "/" + self.backupfile
self.list = []
ConfigListScreen.__init__(self, self.list)
self.onLayoutFinish.append(self.layoutFinished)
if self.runBackup:
self.onShown.append(self.doBackup)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Backup is running..."))
def doBackup(self):
configfile.save()
if config.plugins.softwaremanager.epgcache.value:
eEPGCache.getInstance().save()
try:
			if not path.exists(self.backuppath):
makedirs(self.backuppath)
self.backupdirs = ' '.join( config.plugins.configurationbackup.backupdirs.value )
if not "/tmp/installed-list.txt" in self.backupdirs:
self.backupdirs = self.backupdirs + " /tmp/installed-list.txt"
if not "/tmp/changed-configfiles.txt" in self.backupdirs:
self.backupdirs = self.backupdirs + " /tmp/changed-configfiles.txt"
cmd1 = "opkg list-installed | egrep 'enigma2-plugin-|task-base|packagegroup-base' > /tmp/installed-list.txt"
cmd2 = "opkg list-changed-conffiles > /tmp/changed-configfiles.txt"
cmd3 = "tar -czvf " + self.fullbackupfilename + " " + self.backupdirs
cmd = [cmd1, cmd2, cmd3]
if path.exists(self.fullbackupfilename):
dt = str(date.fromtimestamp(stat(self.fullbackupfilename).st_ctime))
self.newfilename = self.backuppath + "/" + dt + '-' + self.backupfile
if path.exists(self.newfilename):
remove(self.newfilename)
rename(self.fullbackupfilename,self.newfilename)
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, Console, title = _("Backup is running..."), cmdlist = cmd,finishedCallback = self.backupFinishedCB,closeOnSuccess = True)
else:
self.session.open(Console, title = _("Backup is running..."), cmdlist = cmd,finishedCallback = self.backupFinishedCB, closeOnSuccess = True)
except OSError:
if self.finished_cb:
self.session.openWithCallback(self.finished_cb, MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10 )
else:
self.session.openWithCallback(self.backupErrorCB,MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout = 10 )
def backupFinishedCB(self,retval = None):
self.close(True)
def backupErrorCB(self,retval = None):
self.close(False)
def runAsync(self, finished_cb):
self.finished_cb = finished_cb
self.doBackup()
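# Illustrative call site (assumed, not part of this module): a setup screen
# would typically start an immediate backup with
#   self.session.openWithCallback(self.backupDone, BackupScreen, runBackup=True)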
class BackupSelection(Screen):
skin = """
<screen name="BackupSelection" position="center,center" size="560,400" title="Select files/folders to backup">
<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget name="checkList" position="5,50" size="550,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText()
self["summary_description"] = StaticText("")
self.selectedFiles = config.plugins.configurationbackup.backupdirs.value
defaultDir = '/'
inhibitDirs = ["/bin", "/boot", "/dev", "/autofs", "/lib", "/proc", "/sbin", "/sys", "/hdd", "/tmp", "/mnt", "/media"]
self.filelist = MultiFileSelectList(self.selectedFiles, defaultDir, inhibitDirs = inhibitDirs )
self["checkList"] = self.filelist
self["actions"] = ActionMap(["DirectionActions", "OkCancelActions", "ShortcutActions"],
{
"cancel": self.exit,
"red": self.exit,
"yellow": self.changeSelectionState,
"green": self.saveSelection,
"ok": self.okClicked,
"left": self.left,
"right": self.right,
"down": self.down,
"up": self.up
}, -1)
if not self.selectionChanged in self["checkList"].onSelectionChanged:
self["checkList"].onSelectionChanged.append(self.selectionChanged)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
idx = 0
self["checkList"].moveToIndex(idx)
self.setWindowTitle()
self.selectionChanged()
def setWindowTitle(self):
self.setTitle(_("Select files/folders to backup"))
def selectionChanged(self):
current = self["checkList"].getCurrent()[0]
self["summary_description"].text = current[3]
if current[2] is True:
self["key_yellow"].setText(_("Deselect"))
else:
self["key_yellow"].setText(_("Select"))
def up(self):
self["checkList"].up()
def down(self):
self["checkList"].down()
def left(self):
self["checkList"].pageUp()
def right(self):
self["checkList"].pageDown()
def changeSelectionState(self):
self["checkList"].changeSelectionState()
self.selectedFiles = self["checkList"].getSelectedList()
def saveSelection(self):
self.selectedFiles = self["checkList"].getSelectedList()
config.plugins.configurationbackup.backupdirs.setValue(self.selectedFiles)
config.plugins.configurationbackup.backupdirs.save()
config.plugins.configurationbackup.save()
config.save()
self.close(None)
def exit(self):
self.close(None)
def okClicked(self):
if self.filelist.canDescent():
self.filelist.descent()
class RestoreMenu(Screen):
skin = """
<screen name="RestoreMenu" position="center,center" size="560,400" title="Restore backups" >
<ePixmap pixmap="buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget name="filelist" position="5,50" size="550,230" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, plugin_path):
Screen.__init__(self, session)
self.skin_path = plugin_path
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Restore"))
self["key_yellow"] = StaticText(_("Delete"))
self["summary_description"] = StaticText("")
self.sel = []
self.val = []
self.entry = False
self.exe = False
self.path = ""
self["actions"] = NumberActionMap(["SetupActions"],
{
"ok": self.KeyOk,
"cancel": self.keyCancel,
"up": self.keyUp,
"down": self.keyDown
}, -1)
self["shortcuts"] = ActionMap(["ShortcutActions"],
{
"red": self.keyCancel,
"green": self.KeyOk,
"yellow": self.deleteFile,
})
self.flist = []
self["filelist"] = MenuList(self.flist)
self.fill_list()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setWindowTitle()
self.checkSummary()
def setWindowTitle(self):
self.setTitle(_("Restore backups"))
def fill_list(self):
self.flist = []
self.path = getBackupPath()
		if not path.exists(self.path):
makedirs(self.path)
for file in listdir(self.path):
if file.endswith(".tar.gz"):
self.flist.append(file)
self.entry = True
self.flist.sort(reverse=True)
self["filelist"].l.setList(self.flist)
def KeyOk(self):
		if not self.exe and self.entry:
self.sel = self["filelist"].getCurrent()
if self.sel:
self.val = self.path + "/" + self.sel
self.session.openWithCallback(self.startRestore, MessageBox, _("Are you sure you want to restore\nthe following backup:\n%s\nYour receiver will restart after the backup has been restored!") % self.sel)
def keyCancel(self):
self.close()
def keyUp(self):
self["filelist"].up()
self.checkSummary()
def keyDown(self):
self["filelist"].down()
self.checkSummary()
def startRestore(self, ret = False):
		if ret:
self.session.openWithCallback(self.CB_startRestore, MessageBox, _("Do you want to delete the old settings in /etc/enigma2 first?"))
def CB_startRestore(self, ret = False):
self.exe = True
		if ret:
self.session.open(Console, title = _("Restoring..."), cmdlist = ["rm -R /etc/enigma2", "tar -xzvf " + self.path + "/" + self.sel + " -C /", "killall -9 enigma2", "/etc/init.d/autofs restart"])
else:
self.session.open(Console, title = _("Restoring..."), cmdlist = ["tar -xzvf " + self.path + "/" + self.sel + " -C /", "killall -9 enigma2", "/etc/init.d/autofs restart"])
def deleteFile(self):
		if not self.exe and self.entry:
self.sel = self["filelist"].getCurrent()
if self.sel:
self.val = self.path + "/" + self.sel
self.session.openWithCallback(self.startDelete, MessageBox, _("Are you sure you want to delete\nthe following backup:\n") + self.sel)
def startDelete(self, ret = False):
		if ret:
			self.exe = True
			print "removing:", self.val
			if path.exists(self.val):
remove(self.val)
self.exe = False
self.fill_list()
def checkSummary(self):
cur = self["filelist"].getCurrent()
self["summary_description"].text = cur
class RestoreScreen(Screen, ConfigListScreen):
skin = """
<screen position="135,144" size="350,310" title="Restore is running..." >
<widget name="config" position="10,10" size="330,250" transparent="1" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session, runRestore = False):
Screen.__init__(self, session)
self.session = session
self.runRestore = runRestore
self["actions"] = ActionMap(["WizardActions", "DirectionActions"],
{
"ok": self.close,
"back": self.close,
"cancel": self.close,
}, -1)
self.backuppath = getBackupPath()
if not path.isdir(self.backuppath):
self.backuppath = getOldBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + "/" + self.backupfile
self.list = []
ConfigListScreen.__init__(self, self.list)
self.onLayoutFinish.append(self.layoutFinished)
if self.runRestore:
self.onShown.append(self.doRestore)
def layoutFinished(self):
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(_("Restoring..."))
def doRestore(self):
if path.exists("/proc/stb/vmpeg/0/dst_width"):
restorecmdlist = ["rm -R /etc/enigma2", "tar -xzvf " + self.fullbackupfilename + " -C /", "echo 0 > /proc/stb/vmpeg/0/dst_height", "echo 0 > /proc/stb/vmpeg/0/dst_left", "echo 0 > /proc/stb/vmpeg/0/dst_top", "echo 0 > /proc/stb/vmpeg/0/dst_width", "/etc/init.d/autofs restart"]
else:
restorecmdlist = ["rm -R /etc/enigma2", "tar -xzvf " + self.fullbackupfilename + " -C /", "/etc/init.d/autofs restart"]
print"[SOFTWARE MANAGER] Restore Settings !!!!"
self.session.open(Console, title = _("Restoring..."), cmdlist = restorecmdlist, finishedCallback = self.restoreFinishedCB)
def restoreFinishedCB(self,retval = None):
self.session.openWithCallback(self.checkPlugins, RestartNetwork)
def checkPlugins(self):
if path.exists("/tmp/installed-list.txt"):
if os.path.exists("/media/hdd/images/config/noplugins") and config.misc.firstrun.value:
self.userRestoreScript()
else:
self.session.openWithCallback(self.userRestoreScript, installedPlugins)
else:
self.userRestoreScript()
def userRestoreScript(self, ret = None):
SH_List = []
SH_List.append('/media/hdd/images/config/myrestore.sh')
SH_List.append('/media/usb/images/config/myrestore.sh')
SH_List.append('/media/cf/images/config/myrestore.sh')
startSH = None
for SH in SH_List:
if path.exists(SH):
startSH = SH
break
if startSH:
self.session.openWithCallback(self.restoreMetrixSkin, Console, title = _("Running Myrestore script, Please wait ..."), cmdlist = [startSH], closeOnSuccess = True)
else:
self.restoreMetrixSkin()
def restartGUI(self, ret = None):
self.session.open(Console, title = _("Your %s %s will Reboot...")% (getMachineBrand(), getMachineName()), cmdlist = ["killall -9 enigma2"])
def restoreMetrixSkin(self, ret = None):
try:
from Plugins.Extensions.MyMetrixLite.MainSettingsView import MainSettingsView
print"Restoring MyMetrixLite..."
MainSettingsView(None,True)
except:
pass
self.restartGUI()
def runAsync(self, finished_cb):
self.doRestore()
class RestartNetwork(Screen):
def __init__(self, session):
Screen.__init__(self, session)
skin = """
<screen name="RestartNetwork" position="center,center" size="600,100" title="Restart Network Adapter">
<widget name="label" position="10,30" size="500,50" halign="center" font="Regular;20" transparent="1" foregroundColor="white" />
</screen> """
self.skin = skin
self["label"] = Label(_("Please wait while your network is restarting..."))
self["summary_description"] = StaticText(_("Please wait while your network is restarting..."))
self.onShown.append(self.setWindowTitle)
self.onLayoutFinish.append(self.restartLan)
def setWindowTitle(self):
self.setTitle(_("Restart Network Adapter"))
def restartLan(self):
print"[SOFTWARE MANAGER] Restart Network"
iNetwork.restartNetwork(self.restartLanDataAvail)
def restartLanDataAvail(self, data):
if data is True:
iNetwork.getInterfaces(self.getInterfacesDataAvail)
def getInterfacesDataAvail(self, data):
self.close()
class installedPlugins(Screen):
UPDATE = 0
LIST = 1
skin = """
<screen position="center,center" size="600,100" title="Install Plugins" >
<widget name="label" position="10,30" size="500,50" halign="center" font="Regular;20" transparent="1" foregroundColor="white" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
Screen.setTitle(self, _("Install Plugins"))
self["label"] = Label(_("Please wait while we check your installed plugins..."))
self["summary_description"] = StaticText(_("Please wait while we check your installed plugins..."))
self.type = self.UPDATE
self.container = eConsoleAppContainer()
self.container.appClosed.append(self.runFinished)
self.container.dataAvail.append(self.dataAvail)
self.remainingdata = ""
self.pluginsInstalled = []
self.doUpdate()
def doUpdate(self):
print"[SOFTWARE MANAGER] update package list"
self.container.execute("opkg update")
def doList(self):
print"[SOFTWARE MANAGER] read installed package list"
self.container.execute("opkg list-installed | egrep 'enigma2-plugin-|task-base|packagegroup-base'")
def dataAvail(self, strData):
if self.type == self.LIST:
strData = self.remainingdata + strData
lines = strData.split('\n')
if len(lines[-1]):
self.remainingdata = lines[-1]
lines = lines[0:-1]
else:
self.remainingdata = ""
for x in lines:
self.pluginsInstalled.append(x[:x.find(' - ')])
def runFinished(self, retval):
if self.type == self.UPDATE:
self.type = self.LIST
self.doList()
elif self.type == self.LIST:
self.readPluginList()
def readPluginList(self):
self.PluginList = []
f = open("/tmp/installed-list.txt", "r")
lines = f.readlines()
for x in lines:
self.PluginList.append(x[:x.find(' - ')])
f.close()
self.createMenuList()
def createMenuList(self):
self.Menulist = []
for x in self.PluginList:
if x not in self.pluginsInstalled:
self.Menulist.append(SettingsEntry(x , True))
if len(self.Menulist) == 0:
self.close()
else:
if os.path.exists("/media/hdd/images/config/plugins") and config.misc.firstrun.value:
self.startInstall(True)
else:
self.session.openWithCallback(self.startInstall, MessageBox, _("Backup plugins found\ndo you want to install now?"))
def startInstall(self, ret = None):
if ret:
self.session.openWithCallback(self.restoreCB, RestorePlugins, self.Menulist)
else:
self.close()
def restoreCB(self, ret = None):
self.close()
class RestorePlugins(Screen):
def __init__(self, session, menulist):
Screen.__init__(self, session)
Screen.setTitle(self, _("Restore Plugins"))
self.index = 0
self.list = menulist
for r in menulist:
print "[SOFTWARE MANAGER] Plugin to restore: %s" % r[0]
self.container = eConsoleAppContainer()
self["menu"] = List(list())
self["menu"].onSelectionChanged.append(self.selectionChanged)
self["key_green"] = Button(_("Install"))
self["key_red"] = Button(_("Cancel"))
self["summary_description"] = StaticText("")
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"red": self.exit,
"green": self.green,
"cancel": self.exit,
"ok": self.ok
}, -2)
self["menu"].setList(menulist)
self["menu"].setIndex(self.index)
self.selectionChanged()
self.onShown.append(self.setWindowTitle)
def setWindowTitle(self):
self.setTitle(_("Restore Plugins"))
if os.path.exists("/media/hdd/images/config/plugins") and config.misc.firstrun.value:
self.green()
def exit(self):
self.close()
def green(self):
pluginlist = []
self.myipklist = []
for x in self.list:
if x[2]:
myipk = self.SearchIPK(x[0])
if myipk:
self.myipklist.append(myipk)
else:
pluginlist.append(x[0])
if len(pluginlist) > 0:
if len(self.myipklist) > 0:
self.session.open(Console, title = _("Installing plugins..."), cmdlist = ['opkg --force-overwrite install ' + ' '.join(pluginlist)], finishedCallback = self.installLocalIPK, closeOnSuccess = True)
else:
self.session.open(Console, title = _("Installing plugins..."), cmdlist = ['opkg --force-overwrite install ' + ' '.join(pluginlist)], finishedCallback = self.exit, closeOnSuccess = True)
elif len(self.myipklist) > 0:
self.installLocalIPK()
def installLocalIPK(self):
self.session.open(Console, title = _("Installing plugins..."), cmdlist = ['opkg --force-overwrite install ' + ' '.join(self.myipklist)], finishedCallback = self.exit, closeOnSuccess = True)
def ok(self):
index = self["menu"].getIndex()
item = self["menu"].getCurrent()[0]
state = self["menu"].getCurrent()[2]
if state:
self.list[index] = SettingsEntry(item , False)
else:
self.list[index] = SettingsEntry(item, True)
self["menu"].setList(self.list)
self["menu"].setIndex(index)
def selectionChanged(self):
index = self["menu"].getIndex()
		if index is None:
index = 0
else:
self["summary_description"].text = self["menu"].getCurrent()[0]
self.index = index
def drawList(self):
self["menu"].setList(self.Menulist)
self["menu"].setIndex(self.index)
def exitNoPlugin(self, ret):
self.close()
def SearchIPK(self, ipkname):
ipkname = ipkname + "*"
search_dirs = [ "/media/hdd", "/media/usb" ]
sdirs = " ".join(search_dirs)
cmd = 'find %s -name "%s" | grep -iv "./open-multiboot/*" | head -n 1' % (sdirs, ipkname)
res = popen(cmd).read()
if res == "":
return None
else:
return res.replace("\n", "")
| philotas/enigma2 | lib/python/Plugins/SystemPlugins/SoftwareManager/BackupRestore.py | Python | gpl-2.0 | 23,248 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
# Constants for checking returned answers
UNKNOWN_ANSWER = -1
NO = 0
YES = 1
CANCEL = 2
from PyQt4.QtGui import QMessageBox
from opus_gui.main.controllers.instance_handlers import get_mainwindow_instance
def _action_before_continue(question, buttons, parent_widget):
''' base for dialogs that ask users to close with dirty data '''
if parent_widget is None:
parent_widget = get_mainwindow_instance()
ok_answers = [QMessageBox.Apply, QMessageBox.Save, QMessageBox.Yes]
answer = QMessageBox.question(parent_widget, "Warning", question, *buttons)
if answer in ok_answers:
return YES
elif answer == QMessageBox.Discard:
return NO
elif answer == QMessageBox.Cancel:
return CANCEL
return UNKNOWN_ANSWER
def save_before_close(question, parent_widget = None):
'''
Ask the users if they want to save, discard or cancel before continuing.
@param question the question to ask the user
@return the answer (common_dialogs.YES|NO|CANCEL)
'''
buttons = (QMessageBox.Discard, QMessageBox.Save, QMessageBox.Cancel)
return _action_before_continue(question, buttons, parent_widget)
def apply_before_close(question, parent_widget = None):
'''
Ask the users if they want to apply, discard or cancel before continuing.
@param question the question to ask the user
@return the answer (common_dialogs.YES|NO|CANCEL)
'''
buttons = (QMessageBox.Discard, QMessageBox.Apply, QMessageBox.Cancel)
return _action_before_continue(question, buttons, parent_widget)
def user_is_sure(question, parent_widget = None):
buttons = (QMessageBox.Yes, QMessageBox.No)
return _action_before_continue(question, buttons, parent_widget) == YES
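# Illustrative usage (assumed call site): guarding a destructive action:
#   if user_is_sure('Really delete this scenario?'):
#       delete_scenario()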
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/opus_gui/util/common_dialogs.py | Python | gpl-2.0 | 1,898 |
import re
from cfme.common import TopologyMixin, TimelinesMixin
from . import MiddlewareProvider
from utils.appliance import Navigatable
from utils.varmeth import variable
from . import _get_providers_page, _db_select_query
from . import download, MiddlewareBase, auth_btn, mon_btn
from utils.appliance.implementations.ui import navigate_to
from wrapanapi.hawkular import Hawkular
class HawkularProvider(MiddlewareBase, TopologyMixin, TimelinesMixin, MiddlewareProvider):
"""
HawkularProvider class holds provider data. Used to perform actions on hawkular provider page
Args:
name: Name of the provider
hostname: Hostname/IP of the provider
port: http/https port of hawkular provider
credentials: see Credential inner class.
key: The CFME key of the provider in the yaml.
db_id: database row id of provider
Usage:
myprov = HawkularProvider(name='foo',
hostname='localhost',
port=8080,
                                  credentials=Provider.Credential(principal='admin', secret='foobar'))
myprov.create()
myprov.num_deployment(method="ui")
"""
STATS_TO_MATCH = MiddlewareProvider.STATS_TO_MATCH +\
['num_server', 'num_domain', 'num_deployment', 'num_datasource', 'num_messaging']
property_tuples = MiddlewareProvider.property_tuples +\
[('name', 'Name'), ('hostname', 'Host Name'), ('port', 'Port'), ('provider_type', 'Type')]
type_name = "hawkular"
mgmt_class = Hawkular
db_types = ["Hawkular::MiddlewareManager"]
def __init__(self, name=None, hostname=None, port=None, credentials=None, key=None,
appliance=None, sec_protocol=None, **kwargs):
Navigatable.__init__(self, appliance=appliance)
self.name = name
self.hostname = hostname
self.port = port
self.provider_type = 'Hawkular'
if not credentials:
credentials = {}
self.credentials = credentials
self.key = key
self.sec_protocol = sec_protocol if sec_protocol else 'Non-SSL'
self.db_id = kwargs['db_id'] if 'db_id' in kwargs else None
def _form_mapping(self, create=None, **kwargs):
return {'name_text': kwargs.get('name'),
'type_select': create and 'Hawkular',
'sec_protocol': kwargs.get('sec_protocol'),
'hostname_text': kwargs.get('hostname'),
'port_text': kwargs.get('port')}
@variable(alias='db')
def num_deployment(self):
return self._num_db_generic('middleware_deployments')
@num_deployment.variant('ui')
def num_deployment_ui(self, reload_data=True):
self.load_details(refresh=reload_data)
return int(self.get_detail("Relationships", "Middleware Deployments"))
@variable(alias='db')
def num_server(self):
return self._num_db_generic('middleware_servers')
@num_server.variant('ui')
def num_server_ui(self, reload_data=True):
self.load_details(refresh=reload_data)
return int(self.get_detail("Relationships", "Middleware Servers"))
@variable(alias='db')
def num_server_group(self):
res = self.appliance.db.client.engine.execute(
"SELECT count(*) "
"FROM ext_management_systems, middleware_domains, middleware_server_groups "
"WHERE middleware_domains.ems_id=ext_management_systems.id "
"AND middleware_domains.id=middleware_server_groups.domain_id "
"AND ext_management_systems.name='{0}'".format(self.name))
return int(res.first()[0])
@variable(alias='db')
def num_datasource(self):
return self._num_db_generic('middleware_datasources')
@num_datasource.variant('ui')
def num_datasource_ui(self, reload_data=True):
self.load_details(refresh=reload_data)
return int(self.get_detail("Relationships", "Middleware Datasources"))
@variable(alias='db')
def num_domain(self):
return self._num_db_generic('middleware_domains')
@num_domain.variant('ui')
def num_domain_ui(self, reload_data=True):
self.load_details(refresh=reload_data)
return int(self.get_detail("Relationships", "Middleware Domains"))
@variable(alias='db')
def num_messaging(self):
return self._num_db_generic('middleware_messagings')
@num_messaging.variant('ui')
def num_messaging_ui(self, reload_data=True):
self.load_details(refresh=reload_data)
return int(self.get_detail("Relationships", "Middleware Messagings"))
@variable(alias='ui')
def is_refreshed(self, reload_data=True):
self.load_details(refresh=reload_data)
        return bool(re.match('Success.*Minute.*Ago', self.get_detail("Status", "Last Refresh")))
@is_refreshed.variant('db')
def is_refreshed_db(self):
ems = self.appliance.db.client['ext_management_systems']
dates = self.appliance.db.client.session.query(ems.created_on,
ems.updated_on).filter(ems.name == self.name).first()
return dates.updated_on > dates.created_on
@classmethod
def download(cls, extension):
_get_providers_page()
download(extension)
def load_details(self, refresh=False):
"""Call super class `load_details` and load `db_id` if not set"""
MiddlewareProvider.load_details(self, refresh=refresh)
if not self.db_id or refresh:
tmp_provider = _db_select_query(
name=self.name, type='ManageIQ::Providers::Hawkular::MiddlewareManager').first()
self.db_id = tmp_provider.id
def load_topology_page(self):
navigate_to(self, 'TopologyFromDetails')
def recheck_auth_status(self):
self.load_details(refresh=True)
auth_btn("Re-check Authentication Status")
def load_timelines_page(self):
self.load_details()
mon_btn("Timelines")
@staticmethod
def from_config(prov_config, prov_key, appliance=None):
credentials_key = prov_config['credentials']
credentials = HawkularProvider.process_credential_yaml_key(credentials_key)
return HawkularProvider(
name=prov_config['name'],
key=prov_key,
hostname=prov_config['hostname'],
sec_protocol=prov_config.get('sec_protocol'),
port=prov_config['port'],
credentials={'default': credentials},
appliance=appliance)
| dajohnso/cfme_tests | cfme/middleware/provider/hawkular.py | Python | gpl-2.0 | 6,574 |
from collections import deque
import threading
import time
import datetime
import putiopy
import os
from putiosync import multipart_downloader
class Download(object):
"""Object containing information about a download to be performed"""
def __init__(self, putio_file, destination_path):
self._putio_file = putio_file
self._destination_directory = destination_path
self._progress_callbacks = set()
self._start_callbacks = set()
self._completion_callbacks = set()
self._downloaded = 0
self._start_datetime = None
self._finish_datetime = None
def _fire_progress_callbacks(self):
for cb in list(self._progress_callbacks):
cb(self)
def _fire_start_callbacks(self):
for cb in list(self._start_callbacks):
cb(self)
def _fire_completion_callbacks(self):
for cb in list(self._completion_callbacks):
cb(self)
def get_putio_file(self):
return self._putio_file
def get_destination_directory(self):
return self._destination_directory
def get_filename(self):
return self.get_putio_file().name.encode('utf-8', 'ignore')
def get_destination_path(self):
return os.path.join(os.path.abspath(self._destination_directory),
self.get_filename())
def get_downloaded(self):
return self._downloaded
def get_size(self):
return self._putio_file.size
def get_start_datetime(self):
return self._start_datetime
def get_finish_datetime(self):
return self._finish_datetime
def add_start_callback(self, start_callback):
"""Add a callback to be called when there is new progress to report on a download
The callback will be called as follows::
progress_callback(download)
Information about the progress itself will be stored with the download.
"""
self._start_callbacks.add(start_callback)
def add_progress_callback(self, progress_callback):
"""Add a callback to be called whenever a new download is started
The callback will be called as follows::
start_callback(download)
"""
self._progress_callbacks.add(progress_callback)
def add_completion_callback(self, completion_callback):
"""Add a callback to be called whenever a download completes
The callback will be called as follows::
completion_callback(download)
"""
self._completion_callbacks.add(completion_callback)
def perform_download(self, token):
self._start_datetime = datetime.datetime.now()
self._fire_start_callbacks()
putio_file = self.get_putio_file()
dest = self.get_destination_directory()
filename = self.get_filename()
final_path = os.path.join(dest, filename.decode('utf-8'))
download_path = "{}.part".format(final_path.encode('utf-8'))
        # ensure the path into which the download is going to be downloaded exists. We know
# that the 'dest' directory exists but in some cases the filename on put.io may
# have directories within it (for an archive, as an example). In addition, some
# post-processing may delete directories, so let's just recreate the directory
if not os.path.exists(os.path.dirname(download_path)):
os.makedirs(os.path.dirname(download_path))
success = False
with open(download_path, 'wb') as f:
def transfer_callback(offset, chunk):
self._downloaded += len(chunk)
f.seek(offset)
f.write(chunk)
f.flush()
self._fire_progress_callbacks()
success = multipart_downloader.download(
putiopy.BASE_URL + '/files/{}/download'.format(putio_file.id),
self.get_size(),
transfer_callback,
params={'oauth_token': token})
# download to part file is complete. Now move to its final destination
if success:
if os.path.exists(final_path):
os.remove(final_path)
os.rename(download_path, download_path[:-5]) # same but without '.part'
self._finish_datetime = datetime.datetime.now()
self._fire_completion_callbacks()
return success
class DownloadManager(threading.Thread):
"""Component responsible for managing the queue of things to be downloaded"""
def __init__(self, token):
threading.Thread.__init__(self, name="DownloadManager")
self.setDaemon(True)
self._token = token
self._download_queue_lock = threading.RLock() # also used for locking calllback lists
self._download_queue = deque()
self._progress_callbacks = set()
self._start_callbacks = set()
self._completion_callbacks = set()
self._has_exit = False
def _build_callback(self, callbacks):
def callback(*args, **kwargs):
with self._download_queue_lock:
for cb in callbacks:
cb(*args, **kwargs)
return callback
def start(self):
"""Start this donwload manager"""
threading.Thread.start(self)
def add_download(self, download):
"""Add a download to be performed by this download manager"""
if not isinstance(download, Download):
raise TypeError("download must be of type QueuedDownload")
with self._download_queue_lock:
download.add_start_callback(self._build_callback(self._start_callbacks))
download.add_progress_callback(self._build_callback(self._progress_callbacks))
download.add_completion_callback(self._build_callback(self._completion_callbacks))
self._download_queue.append(download)
def add_download_start_progress(self, start_callback):
"""Add a callback to be called whenever a new download is started
The callback will be called as follows::
start_callback(download)
"""
        with self._download_queue_lock:
            self._start_callbacks.add(start_callback)
def add_download_progress_callback(self, progress_callback):
"""Add a callback to be called when there is new progress to report on a download
The callback will be called as follows::
progress_callback(download)
Information about the progress itself will be stored with the download.
"""
with self._download_queue_lock:
self._progress_callbacks.add(progress_callback)
def add_download_completion_callback(self, completion_callback):
"""Add a callback to be called whenever a download completes
The callback will be called as follows::
completion_callback(download)
"""
with self._download_queue_lock:
self._completion_callbacks.add(completion_callback)
def get_downloads(self):
"""Get a list of the downloads active at this time"""
with self._download_queue_lock:
return list(self._download_queue)
def is_empty(self):
"""Return True if there are no queued downloads"""
with self._download_queue_lock:
return len(self._download_queue) == 0
def run(self):
"""Main loop for the download manager"""
while not self._has_exit:
try:
download = self._download_queue[0] # keep in queue until complete
except IndexError:
time.sleep(0.5) # don't busily spin
else:
success = download.perform_download(self._token)
self._download_queue.popleft()
if not success:
# re-add to the end of the queue for retry but do not keep any state that may have been
# associated with the failed download
                    self.add_download(Download(download.get_putio_file(),
                                               download.get_destination_directory()))
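# Illustrative usage (names assumed): wiring a manager to a completion hook:
#   manager = DownloadManager(oauth_token)
#   manager.add_download_completion_callback(on_download_complete)
#   manager.start()
#   manager.add_download(Download(putio_file, '/srv/downloads'))
# on_download_complete(download) fires after the .part file has been renamed
# to its final destination.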
| posborne/putio-sync | putiosync/download_manager.py | Python | mit | 8,122 |
#!/usr/bin/env python
import urllib.request, urllib.parse, urllib.error, re
def getIP():
base_url = "http://whatsmyip.net/" #you can change this if needed
try:
webpage = urllib.request.urlopen(base_url).read().decode('utf-8')
    except IOError:
return "Couldn't reach host\n"
classregex = re.compile("class=\"ip\".*?</h1>")#change this for the website you are using
    ipregex = re.compile(r"\b(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b")
ipclass = classregex.search(webpage)
ipcontainer = ipregex.search(ipclass.group(0))
return ipcontainer.group(0)
if __name__ == "__main__":
print(getIP())
| notptr/Random-Scripts | wmip.py | Python | unlicense | 773 |
# -*- encoding: utf-8 -*-
# Testing new amara.tree API
# Testing quick reference examples
import unittest
import cStringIO
import amara
from amara.lib import testsupport
from amara import tree, xml_print
from amara import bindery
import sys, datetime
from amara.writers.struct import *
from amara.namespaces import *
class TestStructWriter(unittest.TestCase):
def setUp(self):
self.updated1 = datetime.datetime.now().isoformat()
self.output = cStringIO.StringIO()
tags = [u"xml", u"python", u"atom"]
w = structwriter(indent=u"yes", stream=self.output)
self.updated2 = datetime.datetime.now().isoformat()
w.feed(
ROOT(
E((ATOM_NAMESPACE, u'feed'), {(XML_NAMESPACE, u'xml:lang'): u'en'},
E(u'id', u'urn:bogus:myfeed'),
E(u'title', u'MyFeed'),
E(u'updated', self.updated1),
E(u'name',
E(u'title', u'Uche Ogbuji'),
E(u'uri', u'http://uche.ogbuji.net'),
E(u'email', u'[email protected]'),
),
E(u'link', {u'href': u'/blog'}),
E(u'link', {u'href': u'/blog/atom1.0', u'rel': u'self'}),
E(u'entry',
E(u'id', u'urn:bogus:myfeed:entry1'),
E(u'title', u'Hello world'),
E(u'updated', self.updated2),
( E(u'category', {u'term': t}) for t in tags ),
),
E(u'content', {u'type': u'xhtml'},
E((XHTML_NAMESPACE, u'div'),
E(u'p', u'Happy to be here')
)
)
))
)
def test_structwriter(self):
XML_output="""\
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en">
<id>urn:bogus:myfeed</id>
<title>MyFeed</title>
<updated>%s</updated>
<name>
<title>Uche Ogbuji</title>
<uri>http://uche.ogbuji.net</uri>
<email>[email protected]</email>
</name>
<link href="/blog"/>
<link rel="self" href="/blog/atom1.0"/>
<entry>
<id>urn:bogus:myfeed:entry1</id>
<title>Hello world</title>
<updated>%s</updated>
<category term="xml"/>
<category term="python"/>
<category term="atom"/>
</entry>
<content type="xhtml">
<div xmlns="http://www.w3.org/1999/xhtml">
<p>Happy to be here</p>
</div>
</content>
</feed>\
"""
self.assertEqual(self.output.getvalue(), XML_output % (self.updated1, self.updated2))
if __name__ == '__main__':
unittest.main()
| zepheira/amara | test/sevendays/test_one.py | Python | apache-2.0 | 2,714 |
from .forms import UserContactForm, UserContactInfoForm, UserProfileForm, \
handle_user_profile_form, PhoneNumberForm
from .models import ContactInfo, Message, DialList, DialListParticipation, \
PhoneNumber, BICYCLE_DAY
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.forms import ModelForm
from django.http import HttpResponseRedirect as redirect, HttpResponse, Http404
from django.shortcuts import render
from django.template import RequestContext
from django.utils.datastructures import MultiValueDictKeyError
from what_apps.utility.forms import GenericPartyField, SimplePartyLookup
from what_apps.people.models import UserProfile
from what_apps.utility.functions import daily_crypt
import datetime
class ContactForm(ModelForm):
class Meta:
model = Message
def contact_form(request):
form=ContactForm()
return render(request, 'contact_form.html', locals())
@login_required
def contact_list(request):
people = User.objects.all().order_by('last_name')
return render(request, 'contact/contact_list.html', locals())
def contact_profile(request, contact_id=None, username=None):
if contact_id:
contact = ContactInfo.objects.get(id=contact_id)
if username:
contact = User.objects.get(first_name=username) #TODO: Turn into actual username, un-fuckify
contact_info = contact.__dict__.items()
return render(request, 'contact/contact_profile.html', locals())
@login_required
def new_contact(request):
'''
Adds or modifies a contact.
Makes a User object (and a UserProfile and ContactInfo) for them.
'''
#We're going to have two lists of forms; one for the three objects, and one for phone numbers.
contact_forms = []
phone_forms = []
blank_phone_form = PhoneNumberForm(prefix="phone_new")
if request.POST: #Are we posting new information?
#First let's figure out if we are dealing with an existing user or adding a new one.
try:
referenced_user = User.objects.get(id=request.POST['user_id'])
except MultiValueDictKeyError:
referenced_user = False #We didn't get a user passed in via POST.
user_form = UserContactForm(request.POST, prefix="user")
contact_form = UserContactInfoForm(request.POST, prefix="contact")
profile_form = UserProfileForm(request.POST, prefix="profile")
#Now we need to traverse the dict to pick out the phone forms.
#They may come in three types:
#phone_n, where n is the id of a PhoneNumber object - these are existing PhoneNumber objects
#phone_new - these are phone number objects added with the "add phone" button
#phone_get - this is the phone number passed into the form originally (used if you click "new contact" from a PhoneCall Task page.)
populated_phone_forms = []
for item, value in request.POST.items():
#Take note: item will be something like phone_new-number-2
if item.split('_')[0] == "phone":
#Look for phone items, but only branch off once per phone (ie, only on the "number," not the "type")
if item.split('_')[1].split('-')[1] == "number":
try:
entry = item.split('_')[1].split('-')[2] #Which entry is this? There might be more than one number.
type_string = str(item.split('-')[0]) + "-type-" + entry
except IndexError: #This is not a numbered entry.
type_string = str(item.split('-')[0]) + "-type"
type = request.POST[type_string]
number = value
if not(not number and not type): #We only want cases where both number and type are not blank. If either is filled in, we'll proceed.
case_indicator = item.split('_')[1].split('-')[0] #This will be either n, "new", or "get" as per above.
if case_indicator == "new" or case_indicator == "get" or 0:
try:
phone_number_object = PhoneNumber.objects.get(number=number)
populated_phone_forms.append(PhoneNumberForm({'number':number, 'type':type}, instance=phone_number_object))
except PhoneNumber.DoesNotExist:
populated_phone_forms.append(PhoneNumberForm({'number':number, 'type':type}))
else: #Assume that it's the n case
phone_number_object = PhoneNumber.objects.get(id=case_indicator)
populated_phone_forms.append(PhoneNumberForm({'number':number, 'type':type}, instance=phone_number_object))
#Send the forms to the handler for processing.
invalid = handle_user_profile_form(user_form, contact_form, profile_form, populated_phone_forms, user = referenced_user) #Returns forms tuple if forms are invalid; False otherwise
if not invalid: #Here we'll do something special if the handling went as we hoped.
'''
SUCCESS!
'''
if 'profile-birthday_month' in request.POST:
profile_form.instance.birth_month = request.POST['profile-birthday_month']
profile_form.instance.birth_day = request.POST['profile-birthday_day']
profile_form.instance.save()
            # `if` wasn't working here; strange. TODO: change from try block to if. :-)
try: #TODO: Justin and Kieran say: do this with sessions
role = request.GET['role']
if role == 'donor':
#Not good here - I want direct access to the user object by now. REFORM! (People like that reform, pappy)
encrypted_user_id = daily_crypt(user_form.instance.id) #Get the user id, encrypt it.
return redirect('/accounting/record_donation/?donor=' + encrypted_user_id)
except LookupError: #Probably ought to be some different branching here - they don't ALWAYS need to go to watch calls.
#Oh, and BTW, this is SUCCESS.
return redirect(contact_form.instance.get_absolute_url()) #Send them to the contact page.
else: #Not valid - let's tell them so.
contact_forms, phone_forms = invalid
return render(request, 'contact/new_contact.html', locals())
else: #No POST - this is a brand new form.
contact_forms = [UserContactForm(prefix="user"), UserContactInfoForm(prefix="contact"), UserProfileForm(prefix="profile")]
#We want email, first name, and last name to be required for all submissions.
contact_forms[0].fields['email'].required = True
contact_forms[0].fields['first_name'].required = True
contact_forms[0].fields['last_name'].required = True
try: #This might be a "new contact from phone number" request. Let's find out.
phone_forms.append(PhoneNumberForm(initial = {'number': request.GET['phone_number']}, prefix="phone_get")) #Yes it is! Put the phone number in the field.
except MultiValueDictKeyError:
pass #No, it isn't. Move along. Nothing to see here.
#Either we don't have POST (meaning this is a brand new form) or something is invalid.
#In either case, let's set the fields to required and give them the template again.
initial_lookup_form = SimplePartyLookup()
return render(request, 'contact/new_contact.html', locals())
@login_required #TODO: More security
def contact_forms_for_person(request):
contact_forms = []
phone_forms = []
referenced_user = User.objects.get(id=request.GET['user_id'])
user_form = UserContactForm(prefix="user", instance=referenced_user)
contact_forms.append(user_form)
blank_phone_form = PhoneNumberForm(prefix="phone_new")
try:
userprofile = referenced_user.userprofile
profile_form = UserProfileForm(prefix="profile", instance=userprofile)
try:
contact_info = userprofile.contact_info
contact_form = UserContactInfoForm(prefix="contact", instance=contact_info)
contact_forms.append(contact_form)
for phone_number in contact_info.phone_numbers.all():
phone_forms.append(PhoneNumberForm(instance=phone_number, prefix="phone_%s" % (phone_number.id)))
except ContactInfo.DoesNotExist:
contact_form = UserContactInfoForm(prefix="contact")
contact_forms.append(profile_form)
except UserProfile.DoesNotExist:
profile_form = UserProfileForm(request.POST, prefix="profile")
try:
phone_forms.append(PhoneNumberForm(initial = {'number': request.GET['phone_number']}, prefix="phone_get"))
except MultiValueDictKeyError:
pass
return render(request, 'contact/new_contact_inside_form.html', locals())
def toggle_dial_list(request, dial_list_id):
dial_list = DialList.objects.get(id=dial_list_id)
phone_number_id = request.POST['data']
phone_number_object = PhoneNumber.objects.get(id = phone_number_id)
    active = request.POST['truthiness'] != 'false'
if active:
DialListParticipation.objects.create(
number = phone_number_object,
list = dial_list,
)
else:
try:
latest = DialListParticipation.objects.get(number = phone_number_object, list=dial_list, destroyed = BICYCLE_DAY)
latest.destroyed = datetime.datetime.now()
latest.save()
except DialListParticipation.DoesNotExist:
#Ummmm, they weren't on the list in the first place. No need to take them off it.
pass
return HttpResponse(1)
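# Illustrative request (derived from the code above, id values assumed):
# POSTing {'data': '<phone_number_id>', 'truthiness': 'true'} to
# toggle_dial_list adds the number to the list; 'false' stamps the latest
# DialListParticipation as destroyed instead.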
def phone_number_profile(request, phone_number_id):
phone_number = PhoneNumber.objects.get(id=phone_number_id)
if request.POST:
phone_number.spam = int(request.POST['spam'])
phone_number.save()
return render(request, 'contact/phone_number_profile.html', locals()) | SlashRoot/WHAT | what_apps/contact/views.py | Python | mit | 10,747 |
import cx_Oracle
class Connect:
def __init__(self):
self.con = None
def create_connection(self):
self.con = cx_Oracle.connect('root/qwerty12345@localhost')
def disconnect(self):
self.con.commit()
self.con.close()
del self.con
def return_cursor(self):
self.cur = self.con.cursor()
return self.cur
if __name__ == '__main__':
test = Connect()
test.create_connection()
print test.return_cursor()
test.disconnect()
try:
print test.return_cursor()
    except Exception:
        print "Connection not established"
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 25 16:03:08 2017
@author: alek
"""
import json
import uuid
import logging
import sys
import threading
import asyncio
import concurrent
from concurrent.futures import _base
from concurrent.futures import process
from functools import partial
from pyDist import intercom, stateless_intercom, Tasks
class InterfaceHolder(object):
def __init__(self):
logging.basicConfig(format='%(name)-12s:%(lineno)-3s | %(levelname)-8s | %(message)s'
, stream=sys.stdout, level=logging.DEBUG)
self.logger = logging.getLogger(__name__)
self.network_interface = NetworkInterface('nodeX', 'network')
self.user_interfaces = {}
self.node_interfaces = {}
self.client_interfaces = [] # I think this is for the webpage stuff???
self._condition = threading.Condition()
def get_interfaces_as_dict(self):
interface_dict = {'user_interfaces': [],
'node_interfaces': []}
for key in self.user_interfaces:
interface_dict['user_interfaces'].append(str(self.user_interfaces[key]))
for key2 in self.node_interfaces:
interface_dict['node_interfaces'].append(self.node_interfaces[key2].info())
return interface_dict
def connect_node(self, node_data):
with self._condition:
self.logger.debug('connecting node: %s' % node_data)
node_interface = self.find_node_by_node_id(node_data['node_id'])
            if node_interface is None:
node_interface = NodeInterface()
node_interface.node_id = node_data['node_id']
node_interface.ip = node_data['ip']
node_interface.port = node_data['port']
self.node_interfaces.update({str(node_interface.node_id): node_interface})
self.logger.debug('node_interfaces (UPDATED): %s' % self.node_interfaces)
return json.dumps({'connected': True})
else:
return json.dumps({'connected': True})
def find_node_by_node_id(self, node_id):
return self.node_interfaces[node_id] if node_id in self.node_interfaces else None
##TODO:
# need to make the execution of the node stats such as
    # core count async so this execution goes faster. It will
# be a sequential query for now because that will be
# easier for tests.
def update_node_interface_data(self):
"""
For each node in the list of node interfaces update
the data for that node (Ex: core count, available cores, etc).
:return: None
"""
for node_interface in self.node_interfaces:
self.node_interfaces[node_interface].update_counts() # updates: num cores, num running, num queued
def connect_user(self, user_data):
with self._condition:
            self.logger.debug('connecting user: %s' % user_data)
user_interface = self.find_user_by_user_id(user_data['user_id'])
            if user_interface is None:
user_interface = UserInterface(user_data['user_id'], user_data['group_id'])
self.user_interfaces.update({user_interface.user_id: user_interface})
return json.dumps({'connected': True})
else:
return json.dumps({'connected': True})
def find_user_by_user_id(self, user_id):
return self.user_interfaces[user_id] if user_id in self.user_interfaces else None
def find_user_work_item(self):
for user_id in self.user_interfaces:
user = self.user_interfaces[user_id]
if len(user.work_items_received)>0:
work_item = user.work_items_received.pop()
return user, work_item
return None, None
def find_network_work_item(self):
with self._condition:
if len(self.network_interface.work_items_received)>0:
work_item = self.network_interface.work_items_received.pop()
return self.network_interface, work_item
return None, None
def update_work_item_in_user(self, work_item):
with self._condition:
for user_id in self.user_interfaces:
user = self.user_interfaces[user_id]
if user.interface_id == work_item.interface_id:
#if not work_item.returning:
if work_item.item_id in user.work_items_running:
user.work_items_running.remove(work_item.item_id)
else:
                        logging.warning("work item not in user's running list")
user.finished_work_item(work_item)
                    #self.logger.debug(f'job finished event: {user._finished_event}')
self.remove_work_item_in_user_by_item_id(user, work_item.item_id)
return True
return False
def update_work_item_in_network(self, work_item):
with self._condition:
self.network_interface.work_items_running.remove(work_item.item_id)
self.network_interface.finished_work_item(work_item)
self.remove_work_item_in_user_by_item_id(self.network_interface, work_item.item_id)
self.logger.debug(f'network_interface: {self.network_interface}')
def find_user_by_interface_id(self, interface_id):
for user_id in self.user_interfaces:
user = self.user_interfaces[user_id]
if user.interface_id == interface_id:
return user
return None
def remove_work_item_in_user_by_item_id(self, user, item_id):
with self._condition:
for work_item in user.work_items_received:
if work_item.item_id == item_id:
user.work_items_received.remove(work_item)
def wait_for_first_finished_work_item_for_user(self, user):
user._finished_event.wait()
def find_finished_work_item_for_user(self, user):
for work_item in user.work_items_finished:
if work_item.ran:
user.work_items_finished.remove(work_item)
return work_item
return None
def reset_finished_event_for_user(self, user):
user.reset_finished_event()
def __str__(self):
return ('#users: %d, #servers: %d, #clients: %d'
% (len(self.user_interfaces)
, len(self.node_interfaces)
, len(self.client_interfaces)))
class UserInterface(object):
def __init__(self, user_id, group_id):
self.interface_id = uuid.uuid4()
self.user_id = user_id
self.group_id = group_id
self.work_items_received = []
self.work_items_running = []
self.work_items_finished = []
self._condition = threading.Condition()
self._finished_event = threading.Event()
def add_received_work_item(self, work_item):
with self._condition:
work_item.interface_id = self.interface_id
self.work_items_received.append(work_item)
def add_running_work_item(self, work_item):
with self._condition:
self.work_items_running.append(work_item)
def add_finished_work_item(self, work_item):
with self._condition:
self.work_items_finished.append(work_item)
self._finished_event.set()
def finished_work_item(self, work_item):
with self._condition:
self.work_items_finished.append(work_item)
self._finished_event.set()
def reset_finished_event(self):
with self._condition:
if (len(self.work_items_finished)==0):
self._finished_event.clear()
else:
self._finished_event.set()
def counts(self):
return ('#recv: %d, #running: %d, #fin: %d'
% (len(self.work_items_received)
, len(self.work_items_running)
, len(self.work_items_finished)))
def counts_dict(self):
return {'received': len(self.work_items_received),
'running': len(self.work_items_running),
'finished': len(self.work_items_finished)}
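    # Illustrative value (counts assumed): counts_dict() might return
    # {'received': 2, 'running': 1, 'finished': 4}.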
def __str__(self):
return ('user_id: %s, group_id: %s'
% (self.user_id, self.group_id))
class NetworkInterface(UserInterface):
def __init__(self, domain_id, network_id):
UserInterface.__init__(self, domain_id, network_id)
self.is_network_interface = False
def add_received_work_item(self, work_item):
with self._condition:
self.work_items_received.append(work_item)
class NodeInterface(object):
def __init__(self):
self.node_id = uuid.uuid4()
self.ip = None
self.port = None
self.num_cores = None
        self.num_running = None  # local mirror of the remote node's state
        self.num_queued = None  # local mirror of the remote node's state
        self.work_items_sent = {}  # record of items dispatched to this node
self.params = {}
self.event_loop = asyncio.new_event_loop()
logging.basicConfig(format='%(name)-12s:%(lineno)-3s | %(levelname)-8s | %(message)s'
, stream=sys.stdout, level=logging.DEBUG)
self.logger = logging.getLogger(__name__)
def info(self):
return {'node_id': str(self.node_id), 'ip': self.ip
, 'port': self.port, 'num_cores': self.num_cores
, 'num_running': self.num_running
, 'num_queued': self.num_queued
, 'params': self.params}
def get_signature(self):
return {'node_id': self.node_id, 'ip': self.ip
, 'port': self.port}
def update_counts(self):
#response = self.event_loop.run_until_complete(intercom.get_counts(self.ip, self.port))
response = stateless_intercom.get_counts(self.ip, self.port)
self.logger.debug(f'response of update_count(): {response}')
self.num_cores = response["num_cores"] if "num_cores" in response else 1
self.num_running = response["num_tasks_running"] if "num_tasks_running" in response else 1
self.num_queued = response["num_tasks_queued"] if "num_tasks_queued" in response else 1
return response
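    # Illustrative response shape (values assumed): the node's counts endpoint
    # returns something like
    #   {'num_cores': 4, 'num_tasks_running': 1, 'num_tasks_queued': 0}
    # and any missing key falls back to 1 above.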
def update_info(self):
response = self.event_loop.run_until_complete(intercom.get_node_info(self.ip, self.port))
self.node_id = uuid.UUID(str(response['node_id'])) if 'node_id' in response else None
self.ip = response['ip'] if 'ip' in response else None
self.port = response['port'] if 'port' in response else None
self.num_cores = response['num_cores'] if 'num_cores' in response else None
self.num_running = response['num_running'] if 'num_running' in response else None
self.num_queued = response['num_queued'] if 'num_queued' in response else None
return response
def add_work_item(self, work_item):
self.logger.debug('C <--- C work_item: %s' % work_item)
#response = self.event_loop.run_until_complete(intercom.post_work_item(self.ip, self.port
# , task, params=self.params))
response = stateless_intercom.post_work_item(self.ip, self.port, work_item, params=self.params)
if response['task_added']:
self.work_items_sent[work_item.item_id] = work_item
return True
else:
return False
def get_num_cores(self):
return self.num_cores
def get_num_running(self):
return self.num_running
class ClusterExecutor(_base.Executor):
def __init__(self, ip, port):
self.ip = ip
self.port = port
self.user_id = None
self.group_id = None
self.params = {}
self.tasks_sent = {}
self.tasks_pending = {}
self.futures = []
self.tasks_received = 0
self.worker_loop = None
self.event_loop = asyncio.get_event_loop()
self.worker_thread = None
self._condition = threading.Condition()
self._work_item_sent = threading.Event()
self._closed = threading.Event()
logging.basicConfig(format='%(name)-12s:%(lineno)-3s | %(levelname)-8s | %(message)s'
, stream=sys.stdout, level=logging.DEBUG)
self.logger = logging.getLogger(__name__)
def connect(self, user_id, group_id='base'):
response = self.event_loop.run_until_complete(intercom.connect_user(self.ip
, self.port, params={'user_id': user_id
, 'group_id': group_id}))
if 'connected' in response and response['connected']:
self.user_id = user_id
self.group_id = group_id
self._update_params()
self.worker_loop = asyncio.new_event_loop()
self.worker_thread = threading.Thread(target=self.event_loop_worker, args=(self.worker_loop,))
self.worker_thread.start()
return True
else:
return False
def shutdown_executor(self):
response = self.event_loop.run_until_complete(intercom.shutdown_executor(self.ip, self.port))
return response
def disconnect(self):
self._closed.set()
self._work_item_sent.set()
self.worker_loop.call_soon_threadsafe(self.stop_worker_loop)
self.worker_thread.join()
self.worker_loop.stop()
self.worker_loop.close()
self.logger.debug(f'self.worker_thread: {self.worker_thread}')
self.logger.debug(f'self.worker_loop: {self.worker_loop}')
self.logger.debug(f'self.event_loop : {self.event_loop }')
def stop_worker_loop(self):
self.logger.debug('*----> called stop worker loop')
self.worker_loop.stop()
def _update_params(self):
self.params = {'user_id': self.user_id
, 'group_id': self.group_id}
#EXECUTOR METHODS HERE####################
#submit a new job here
#this is where a new task needs to be created
def submit(self, fn, *args, **kwargs):
task = Tasks.Task(fn, args, kwargs)
with self._condition:
task_added = self.add_work_item(task)
if task_added:
self._work_item_sent.set()
return task.future
def map(self, fn, *iterables, timeout=None, chunksize=1):
self.logger.debug('map function')
if chunksize < 1:
raise ValueError("chunksize must be >= 1.")
results = super().map(partial(process._process_chunk, fn),
process._get_chunks(*iterables, chunksize=chunksize),
timeout=timeout)
return results
def as_completed(self):
#self.logger.debug(f'len(self.futures): {len(self.futures)}')
return concurrent.futures.as_completed(self.futures)
def clear_futures(self):
self.futures = []
def shutdown(self, wait=True):
self.logger.debug('shutdown()')
#self.server_loop.call_soon_threadsafe(self.server_loop.stop)
##########################################
def add_work_item(self, task):
self.logger.debug('C <--- U work item: %s' % task)
response = self.event_loop.run_until_complete(intercom.post_work_item(self.ip,
self.port,
task,
params=self.params))
if 'task_added' in response and response['task_added']:
with self._condition:
self.tasks_sent[task.work_item.item_id] = task
self.add_future(task.future)
return True
else:
with self._condition:
self.logger.debug('task added to pending tasks')
self.tasks_pending[task.work_item.item_id] = task
self.add_future(task.future)
return False
def add_future(self, future):
self.futures.append(future)
async def finished_work_item_thread(self):
self.logger.debug('*** Started the finished_task_thread')
while True:
try:
self._work_item_sent.wait()
with self._condition:
if self.tasks_received == len(self.tasks_sent):
if self._closed.is_set():
break
self._work_item_sent.clear()
self._work_item_sent.wait()
if self._closed.is_set():
break
work_item = await intercom.get_single_task(self.ip, self.port, params=self.params)
work_item.unpickleInnerData()
            except RuntimeError as e:
                self.logger.error('Error in finished work item process: %s' % e)
                if self._closed.is_set():
                    break
                continue  # work_item may be unbound after a failed fetch; retry the loop
self.logger.debug('C ---> U task: %s' % work_item)
if work_item.item_id in self.tasks_sent:
#self.logger.debug(f'has the work item been run: {self.tasks_sent[work_item.item_id].work_item.ran}')
if self.tasks_sent[work_item.item_id].work_item.ran:
#self.logger.debug(f'<-*-> the work item has already been accepted try another')
continue
with self._condition:
self.tasks_sent[work_item.item_id].update(work_item)
self.logger.debug('updated the work item')
self.tasks_received += 1
self.logger.debug(f'*** self.tasks_received={self.tasks_received} ***')
else:
self.logger.warning('NOT A WORK ITEM SENT BY THIS EXECUTOR')
self.logger.debug('*** Ended the finished_task_thread')
def event_loop_worker(self, loop):
self.logger.debug('*** start of loop worker')
asyncio.set_event_loop(loop)
#asyncio.run_until_complete(self.finished_work_item_thread())
try:
#loop.run_forever()
loop.run_until_complete(self.finished_work_item_thread())
        except RuntimeError as e:
            self.logger.error('*** stopped updating work items: %s' % e)
finally:
self.logger.debug('*** finally shutting down async gens ***')
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
self.logger.debug('*** end of loop worker')
exit()
def __str__(self):
return ('ip: %s, port: %s, user_id: %s, group_id: %s'
% (self.ip, self.port, self.user_id, self.group_id))
if __name__ == '__main__':
print('Interfaces Testing')
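    # A minimal usage sketch, not part of the original file; the endpoint and
    # the submitted callable are placeholders. Method names (connect, submit,
    # as_completed, disconnect) are the ones defined on ClusterExecutor above.
    #   executor = ClusterExecutor('127.0.0.1', 18880)
    #   if executor.connect(user_id='user1'):
    #       future = executor.submit(pow, 2, 8)
    #       for completed in executor.as_completed():
    #           print(completed.result())
    #       executor.disconnect()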
| alekLukanen/pyDist | pyDist/Interfaces.py | Python | mit | 19,248 |
class BinaryTree(object):
def __init__(self, val):
self.value = val
self.left = None
self.right = None
self.parent = None
    def set_left(self, node):
self.left = node
self.left.parent = self
    def set_right(self, node):
self.right = node
self.right.parent = self
def inorder(self):
left_vals = self.left.inorder() if self.left is not None else []
right_vals = self.right.inorder() if self.right is not None else []
return left_vals + [self.value] + right_vals
if __name__ == '__main__':
tree = BinaryTree(4)
left = BinaryTree(3)
left.set_left(BinaryTree(1))
left.set_right(BinaryTree(20))
right = BinaryTree(7)
right.set_left(BinaryTree(6))
right.set_right(BinaryTree(30))
tree.set_left(left)
tree.set_right(right)
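    # Inorder traversal (left subtree, root, right subtree) should print:
    # [1, 3, 20, 4, 6, 7, 30]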
print(tree.inorder()) | pavantrinath/basics_written_in_python | Trees and graphs/BinaryTree.py | Python | gpl-2.0 | 873 |
# This file is imported when the zone wants to load an instance of this zone.
from games.zones.basezone import randloc, randrot, randscale
from games.zones.basezone import BaseZone
from elixir_models import Object
class Zone(BaseZone):
def __init__(self, logger=None, *args, **kwargs):
'''Initialize the zone.
Insert whatever objects into the database programmatically.
This includes loading things from a disk file if you so choose.
It will not run more than once on the zone's database.
If you want new content on an existing database, either make
a script to apply changes, or just delete the database for
that zone and recreate it when the zone is started up again.
'''
super(Zone, self).__init__()
self.logger.info("Starting GhibliHills zone script...")
def insert_objects(self):
'''Insert any objects you want to be present in the zone into the
database in this call.
This gets called exactly once per database. If you change something here
and want it to appear in the zone's database, you will need to clear the
database first.
Deleting the "Loading Complete" object will only cause duplicates.
Do not do this.
'''
self.logger.info("Placing chickens...")
# Place 10 chickens randomly:
for i in xrange(10):
obj = Object()
obj.name = "Chicken #%d" % i
obj.resource = 'chicken'
obj.loc_x, obj.loc_y, obj.loc_z = randloc(), randloc(), randloc()
obj.rot_x, obj.rot_y, obj.rot_z = randrot(), randrot(), randrot()
obj.scale_x, obj.scale_y, obj.scale_z = randscale(), randscale(), randscale()
obj.vel_x, obj.vel_y, obj.vel_z = 0, 0, 0
obj.states.extend(['alive', 'whole', 'clickable'])
obj.scripts = ['games.objects.chicken']
obj.save()
self.logger.info(str([o.name for o in Object.get_objects()]))
| cnelsonsic/SimpleMMO | games/zones/GhibliHills.py | Python | agpl-3.0 | 2,016 |
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_export_model_arc_edge_geometry1257
except ImportError:
bt_export_model_arc_edge_geometry1257 = sys.modules[
"onshape_client.oas.models.bt_export_model_arc_edge_geometry1257"
]
try:
from onshape_client.oas.models import bt_vector3d389
except ImportError:
bt_vector3d389 = sys.modules["onshape_client.oas.models.bt_vector3d389"]
class BTExportModelEdgeGeometry1125(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            (for an attribute var_name this is (var_name,)). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
        validations (dict): The key is the tuple path to the attribute
            (for an attribute var_name this is (var_name,)). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
        This must be a class method so a model may have properties that are
        of type self; this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"end_point": (bt_vector3d389.BTVector3d389,), # noqa: E501
"end_vector": (bt_vector3d389.BTVector3d389,), # noqa: E501
"length": (float,), # noqa: E501
"mid_point": (bt_vector3d389.BTVector3d389,), # noqa: E501
"quarter_point": (bt_vector3d389.BTVector3d389,), # noqa: E501
"start_point": (bt_vector3d389.BTVector3d389,), # noqa: E501
"start_vector": (bt_vector3d389.BTVector3d389,), # noqa: E501
}
@staticmethod
def discriminator():
return {
"bt_type": {
"BTExportModelArcEdgeGeometry-1257": bt_export_model_arc_edge_geometry1257.BTExportModelArcEdgeGeometry1257,
},
}
attribute_map = {
"bt_type": "btType", # noqa: E501
"end_point": "endPoint", # noqa: E501
"end_vector": "endVector", # noqa: E501
"length": "length", # noqa: E501
"mid_point": "midPoint", # noqa: E501
"quarter_point": "quarterPoint", # noqa: E501
"start_point": "startPoint", # noqa: E501
"start_vector": "startVector", # noqa: E501
}
@staticmethod
def _composed_schemas():
return None
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""bt_export_model_edge_geometry1125.BTExportModelEdgeGeometry1125 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
end_point (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
end_vector (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
length (float): [optional] # noqa: E501
mid_point (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
quarter_point (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
start_point (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
start_vector (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
for var_name, var_value in six.iteritems(kwargs):
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
@classmethod
def get_discriminator_class(cls, from_server, data):
"""Returns the child class specified by the discriminator"""
discriminator = cls.discriminator()
discr_propertyname_py = list(discriminator.keys())[0]
discr_propertyname_js = cls.attribute_map[discr_propertyname_py]
if from_server:
class_name = data[discr_propertyname_js]
else:
class_name = data[discr_propertyname_py]
class_name_to_discr_class = discriminator[discr_propertyname_py]
return class_name_to_discr_class.get(class_name)
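# For example (a sketch, not part of the generated code): given a server
# payload whose "btType" is "BTExportModelArcEdgeGeometry-1257",
# get_discriminator_class resolves the concrete model through attribute_map
# and discriminator() above:
#   BTExportModelEdgeGeometry1125.get_discriminator_class(
#       from_server=True, data={"btType": "BTExportModelArcEdgeGeometry-1257"})
#   # -> bt_export_model_arc_edge_geometry1257.BTExportModelArcEdgeGeometry1257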
| onshape-public/onshape-clients | python/onshape_client/oas/models/bt_export_model_edge_geometry1125.py | Python | mit | 7,154 |
#Borrowed from ePad error popup done by ylee
from efl.evas import EVAS_HINT_EXPAND, EVAS_HINT_FILL
from efl import elementary
from efl.elementary.box import Box
from efl.elementary.icon import Icon
from efl.elementary.button import Button
from efl.elementary.image import Image
from efl.elementary.popup import Popup
from efl.elementary.label import Label, ELM_WRAP_WORD
from efl.elementary.table import Table
from efl.elementary.need import need_ethumb
EXPAND_BOTH = EVAS_HINT_EXPAND, EVAS_HINT_EXPAND
EXPAND_HORIZ = EVAS_HINT_EXPAND, 0.0
FILL_BOTH = EVAS_HINT_FILL, EVAS_HINT_FILL
FILL_HORIZ = EVAS_HINT_FILL, 0.5
ALIGN_CENTER = 0.5, 0.5
class StandardPopup(Popup):
def __init__(self, ourParent, ourMsg, ourIcon=None, *args, **kwargs):
Popup.__init__(self, ourParent, *args, **kwargs)
self.callback_block_clicked_add(lambda obj: self.delete())
# Add a table to hold dialog image and text to Popup
tb = Table(self, size_hint_weight=EXPAND_BOTH)
self.part_content_set("default", tb)
tb.show()
# Add dialog-error Image to table
need_ethumb()
        icon = Icon(self, thumb=True)
icon.standard_set(ourIcon)
        # When run via gksudo or sudo, loading the Image here fails
        # unless the command is invoked with options that preserve the
        # user's existing environment. Other icons may also fail to load,
        # but that case does not raise an exception.
        # Works fine using eSudo as a gksudo alternative;
        # other alternatives not tested.
try:
dialogImage = Image(self,
size_hint_weight=EXPAND_HORIZ,
size_hint_align=FILL_BOTH,
file=icon.file_get())
tb.pack(dialogImage, 0, 0, 1, 1)
dialogImage.show()
except RuntimeError:
# An error message is displayed for this same error
# when aboutWin is initialized so no need to redisplay.
pass
# Add dialog text to table
dialogLabel = Label(self, line_wrap=ELM_WRAP_WORD,
size_hint_weight=EXPAND_HORIZ,
size_hint_align=FILL_BOTH)
dialogLabel.text = ourMsg
tb.pack(dialogLabel, 1, 0, 1, 1)
dialogLabel.show()
# Ok Button
ok_btt = Button(self)
ok_btt.text = "Ok"
ok_btt.callback_clicked_add(lambda obj: self.delete())
ok_btt.show()
# add button to popup
self.part_content_set("button3", ok_btt)
| JeffHoogland/bodhi3packages | python-elm-extensions/usr/lib/python2.7/dist-packages/elmextensions/StandardPopup.py | Python | bsd-3-clause | 2,570 |
"""Emulated IEEE 754 floating-point arithmetic.
"""
from ..titanic import gmpmath
from ..titanic import digital
from ..titanic.integral import bitmask
from ..titanic.ops import RM, OP
from .evalctx import IEEECtx
from . import mpnum
from . import interpreter
used_ctxs = {}
def ieee_ctx(es, nbits, rm=RM.RNE):
try:
return used_ctxs[(es, nbits, rm)]
except KeyError:
ctx = IEEECtx(es=es, nbits=nbits, rm=rm)
used_ctxs[(es, nbits, rm)] = ctx
return ctx
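# For example, ieee_ctx(8, 32) yields a binary32 context and ieee_ctx(11, 64)
# a binary64 context; repeated calls with the same (es, nbits, rm) return the
# cached instance from used_ctxs.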
class Float(mpnum.MPNum):
    _ctx: IEEECtx = ieee_ctx(11, 64)
@property
def ctx(self):
"""The rounding context used to compute this value.
If a computation takes place between two values, then
it will either use a provided context (which will be recorded
on the result) or the more precise of the parent contexts
if none is provided.
"""
return self._ctx
def is_identical_to(self, other):
if isinstance(other, type(self)):
return super().is_identical_to(other) and self.ctx.es == other.ctx.es and self.ctx.nbits == other.ctx.nbits
else:
return super().is_identical_to(other)
def __init__(self, x=None, ctx=None, **kwargs):
if ctx is None:
ctx = type(self)._ctx
if x is None or isinstance(x, digital.Digital):
super().__init__(x=x, **kwargs)
else:
if kwargs:
raise ValueError('cannot specify additional values {}'.format(repr(kwargs)))
f = gmpmath.mpfr(x, ctx.p)
unrounded = gmpmath.mpfr_to_digital(f)
super().__init__(x=self._round_to_context(unrounded, ctx=ctx, strict=True))
self._ctx = ieee_ctx(ctx.es, ctx.nbits, rm=ctx.rm)
def __repr__(self):
return '{}(negative={}, c={}, exp={}, inexact={}, rc={}, isinf={}, isnan={}, ctx={})'.format(
type(self).__name__, repr(self._negative), repr(self._c), repr(self._exp),
repr(self._inexact), repr(self._rc), repr(self._isinf), repr(self._isnan), repr(self._ctx)
)
def __str__(self):
return str(gmpmath.digital_to_mpfr(self))
def __float__(self):
return float(gmpmath.digital_to_mpfr(self))
def __int__(self):
return int(gmpmath.digital_to_mpfr(self))
@classmethod
def _select_context(cls, *args, ctx=None):
if ctx is not None:
return ieee_ctx(ctx.es, ctx.nbits, rm=ctx.rm)
else:
es = max((f.ctx.es for f in args if isinstance(f, cls)))
p = max((f.ctx.p for f in args if isinstance(f, cls)))
return ieee_ctx(es, es + p)
@classmethod
def _round_to_context(cls, unrounded, ctx=None, strict=False):
if ctx is None:
if isinstance(unrounded, cls):
ctx = unrounded.ctx
else:
raise ValueError('no context specified to round {}'.format(repr(unrounded)))
if unrounded.isinf or unrounded.isnan:
return cls(unrounded, ctx=ctx)
magnitude = cls(unrounded, negative=False)
if magnitude < ctx.fbound:
return cls(unrounded.round_new(max_p=ctx.p, min_n=ctx.n, rm=ctx.rm, strict=strict), ctx=ctx)
else:
if magnitude > ctx.fbound or magnitude.rc >= 0:
return cls(negative=unrounded.negative, isinf=True, ctx=ctx)
else:
return cls(unrounded.round_new(max_p=ctx.p, min_n=ctx.n, rm=ctx.rm, strict=strict), ctx=ctx)
# most operations come from mpnum
def isnormal(self):
return not (
self.is_zero()
or self.isinf
or self.isnan
or self.e < self.ctx.emin
)
class Interpreter(interpreter.StandardInterpreter):
dtype = Float
ctype = IEEECtx
def arg_to_digital(self, x, ctx):
return self.dtype(x, ctx=ctx)
def _eval_constant(self, e, ctx):
try:
return None, self.constants[e.value]
except KeyError:
return None, self.round_to_context(gmpmath.compute_constant(e.value, prec=ctx.p), ctx=ctx)
    # unfortunately, interpreting these values efficiently requires info from the context,
# so it has to be implemented per interpreter...
def _eval_integer(self, e, ctx):
x = digital.Digital(m=e.i, exp=0, inexact=False)
return None, self.round_to_context(x, ctx=ctx)
def _eval_rational(self, e, ctx):
p = digital.Digital(m=e.p, exp=0, inexact=False)
q = digital.Digital(m=e.q, exp=0, inexact=False)
x = gmpmath.compute(OP.div, p, q, prec=ctx.p)
return None, self.round_to_context(x, ctx=ctx)
def _eval_digits(self, e, ctx):
x = gmpmath.compute_digits(e.m, e.e, e.b, prec=ctx.p)
return None, self.round_to_context(x, ctx=ctx)
def round_to_context(self, x, ctx):
"""Not actually used?"""
return self.dtype._round_to_context(x, ctx=ctx, strict=False)
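# The bit-pattern helpers below appear to predate the Float class: they still
# refer to sinking.Sink and a round_to_ieee_ctx() helper that are not defined
# or imported in this module as shown, so they assume the legacy `sinking`
# module from an earlier revision is importable.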
def digital_to_bits(x, ctx=ieee_ctx(11, 53)):
if ctx.p < 2 or ctx.w < 2:
raise ValueError('format with w={}, p={} cannot be represented with IEEE 754 bit pattern'.format(ctx.w, ctx.p))
try:
rounded = round_to_ieee_ctx(x, ctx)
except sinking.PrecisionError:
rounded = round_to_ieee_ctx(sinking.Sink(x, inexact=False), ctx)
pbits = ctx.p - 1
if rounded.negative:
S = 1
else:
S = 0
if rounded.isnan:
# canonical NaN
return (0 << (ctx.w + pbits)) | (bitmask(ctx.w) << pbits) | (1 << (pbits - 1))
elif rounded.isinf:
return (S << (ctx.w + pbits)) | (bitmask(ctx.w) << pbits) # | 0
elif rounded.is_zero():
return (S << (ctx.w + pbits)) # | (0 << pbits) | 0
c = rounded.c
cbits = rounded.p
e = rounded.e
if e < ctx.emin:
# subnormal
lz = (ctx.emin - 1) - e
if lz > pbits or (lz == pbits and cbits > 0):
raise ValueError('exponent out of range: {}'.format(e))
elif lz + cbits > pbits:
raise ValueError('too much precision: given {}, can represent {}'.format(cbits, pbits - lz))
E = 0
C = c << (lz - (pbits - cbits))
elif e <= ctx.emax:
# normal
if cbits > ctx.p:
raise ValueError('too much precision: given {}, can represent {}'.format(cbits, ctx.p))
elif cbits < ctx.p:
raise ValueError('too little precision: given {}, can represent {}'.format(cbits, ctx.p))
E = e + ctx.emax
C = (c << (ctx.p - cbits)) & bitmask(pbits)
else:
# overflow
raise ValueError('exponent out of range: {}'.format(e))
return (S << (ctx.w + pbits)) | (E << pbits) | C
def bits_to_digital(i, ctx=ieee_ctx(11, 53)):
pbits = ctx.p - 1
S = (i >> (ctx.w + pbits)) & bitmask(1)
E = (i >> pbits) & bitmask(ctx.w)
C = i & bitmask(pbits)
negative = (S == 1)
e = E - ctx.emax
if E == 0:
# subnormal
c = C
exp = -ctx.emax - pbits + 1
elif e <= ctx.emax:
# normal
c = C | (1 << pbits)
exp = e - pbits
else:
# nonreal
if C == 0:
return sinking.Sink(negative=negative, c=0, exp=0, inf=True, rc=0)
else:
return sinking.Sink(negative=False, c=0, exp=0, nan=True, rc=0)
# unfortunately any rc / exactness information is lost
return sinking.Sink(negative=negative, c=c, exp=exp, inexact=False, rc=0)
def show_bitpattern(x, ctx=ieee_ctx(11, 53)):
print(x)
    if isinstance(x, int):
        i = x
    elif isinstance(x, sinking.Sink):
        i = digital_to_bits(x, ctx=ctx)
    else:
        raise TypeError('expected an int or a sinking.Sink, got {}'.format(repr(x)))
S = i >> (ctx.w + ctx.p - 1)
E = (i >> (ctx.p - 1)) & bitmask(ctx.w)
C = i & bitmask(ctx.p - 1)
if E == 0 or E == bitmask(ctx.w):
hidden = 0
else:
hidden = 1
return ('float{:d}({:d},{:d}): {:01b} {:0'+str(ctx.w)+'b} ({:01b}) {:0'+str(ctx.p-1)+'b}').format(
ctx.w + ctx.p, ctx.w, ctx.p, S, E, hidden, C,
)
# import numpy as np
# import sys
# def bits_to_numpy(i, nbytes=8, dtype=np.float64):
# return np.frombuffer(
# i.to_bytes(nbytes, sys.byteorder),
# dtype=dtype, count=1, offset=0,
# )[0]
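# A minimal usage sketch, not part of the original file: construct a binary32
# value and inspect it (Float.__float__ and __str__ convert via gmpmath).
#   x = Float('0.1', ctx=ieee_ctx(8, 32))
#   print(x, float(x), x.ctx.p)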
| billzorn/fpunreal | titanfp/arithmetic/ieee754.py | Python | mit | 8,338 |
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse, Http404
from django.db.models import Q
from django.template import RequestContext
from projects.models import Project, SubProject
from data.models import Protocol, Result, Experiment
from reagents.models import Antibody, Chemical, Construct, Cell
from proteins.models import Protein
from external.models import Contact, Reference
def index(request):
return render_to_response('index.html', context_instance=RequestContext(request))
def search(request):
query = request.GET.get('q', '')
if query:
qset = (
Q(assay__icontains=query)|
Q(experiment__icontains=query)|
Q(cellline__icontains=query)|
Q(protocol__icontains=query)|
Q(antibodies__icontains=query)|
Q(chemicals__icontains=query)|
Q(constructs__icontains=query)|
Q(comments__icontains=query)|
Q(researcher__icontains=query)|
Q(protein__icontains=query)
)
results = Experiment.objects.filter(qset).distinct().order_by('-experiment_date')
else:
results = []
return render_to_response("search.html", {
"results": results,
"query": query
}, context_instance=RequestContext(request))
| davebridges/ExperimentDB | experimentdb/views.py | Python | bsd-3-clause | 1,226 |
import logging
from functools import wraps
from io import FileIO
from os import path
from urlparse import parse_qs, urlparse
from iso8601 import parse_date
from munch import munchify
from restkit import BasicAuth, Resource, request
from restkit.errors import ResourceNotFound
from retrying import retry
from simplejson import dumps, loads
from .exceptions import InvalidResponse, NoToken
logger = logging.getLogger(__name__)
IGNORE_PARAMS = ('uri', 'path')
def verify_file(fn):
@wraps(fn)
def wrapper(self, file_, *args, **kwargs):
if isinstance(file_, basestring):
# Using FileIO here instead of open()
# to be able to override the filename
# which is later used when uploading the file.
#
# Explanation:
#
# 1) Restkit reads the filename
# from "name" attribute of a file-like object,
# there is no other way to specify a filename;
#
# 2) The attribute may contain the full path to file,
# which does not work well as a filename;
#
# 3) The attribute is readonly when using open(),
# unlike FileIO object.
file_ = FileIO(file_, 'rb')
file_.name = path.basename(file_.name)
if hasattr(file_, 'read'):
# A file-like object must have 'read' method
return fn(self, file_, *args, **kwargs)
else:
raise TypeError('Expected either a string '
'containing a path to file or a '
'file-like object, got {}'.format(type(file_)))
return wrapper
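# For example, both calls below are accepted by the decorated upload methods
# defined later in this module (the path is a placeholder):
#   client.upload_document('/tmp/report.pdf', tender)
#   client.upload_document(open('/tmp/report.pdf', 'rb'), tender)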
class APIBaseClient(Resource):
"""base class for API"""
def __init__(self, key,
host_url,
api_version,
resource,
params=None,
**kwargs):
super(APIBaseClient, self).__init__(
host_url,
filters=[BasicAuth(key, "")],
**kwargs
)
self.prefix_path = '/api/{}/{}'.format(api_version, resource)
if not isinstance(params, dict):
params = {"mode": "_all_"}
self.params = params
self.headers = {"Content-Type": "application/json"}
# To perform some operations (e.g. create a tender)
# we first need to obtain a cookie. For that reason,
# here we send a HEAD request to a neutral URL.
self.head('/api/{}/spore'.format(api_version))
def request(self, method, path=None, payload=None, headers=None,
params_dict=None, **params):
_headers = dict(self.headers)
_headers.update(headers or {})
try:
response = super(APIBaseClient, self).request(
method, path=path, payload=payload, headers=_headers,
params_dict=params_dict, **params
)
if 'Set-Cookie' in response.headers:
self.headers['Cookie'] = response.headers['Set-Cookie']
return response
except ResourceNotFound as e:
if 'Set-Cookie' in e.response.headers:
self.headers['Cookie'] = e.response.headers['Set-Cookie']
raise e
def patch(self, path=None, payload=None, headers=None,
params_dict=None, **params):
""" HTTP PATCH
- payload: string passed to the body of the request
- path: string additionnal path to the uri
- headers: dict, optionnal headers that will
be added to HTTP request.
- params: Optionnal parameterss added to the request
"""
return self.request("PATCH", path=path, payload=payload,
headers=headers, params_dict=params_dict, **params)
def delete(self, path=None, headers=None):
""" HTTP DELETE
- path: string additionnal path to the uri
- headers: dict, optionnal headers that will
be added to HTTP request.
- params: Optionnal parameterss added to the request
"""
return self.request("DELETE", path=path, headers=headers)
def _update_params(self, params):
for key in params:
if key not in IGNORE_PARAMS:
self.params[key] = params[key]
def _create_resource_item(self, url, payload, headers={}):
headers.update(self.headers)
response_item = self.post(
url, headers=headers, payload=dumps(payload)
)
if response_item.status_int == 201:
return munchify(loads(response_item.body_string()))
raise InvalidResponse
def _get_resource_item(self, url, headers={}):
headers.update(self.headers)
response_item = self.get(url, headers=headers)
if response_item.status_int == 200:
return munchify(loads(response_item.body_string()))
raise InvalidResponse
def _patch_resource_item(self, url, payload, headers={}):
headers.update(self.headers)
response_item = self.patch(
url, headers=headers, payload=dumps(payload)
)
if response_item.status_int == 200:
return munchify(loads(response_item.body_string()))
raise InvalidResponse
def _upload_resource_file(self, url, data, headers={}, method='post'):
file_headers = {}
file_headers.update(self.headers)
file_headers.update(headers)
file_headers['Content-Type'] = "multipart/form-data"
response_item = getattr(self, method)(
url, headers=file_headers, payload=data
)
if response_item.status_int in (201, 200):
return munchify(loads(response_item.body_string()))
raise InvalidResponse
def _delete_resource_item(self, url, headers={}):
response_item = self.delete(url, headers=headers)
if response_item.status_int == 200:
return munchify(loads(response_item.body_string()))
raise InvalidResponse
class TendersClient(APIBaseClient):
"""client for tenders"""
def __init__(self, key,
host_url="https://api-sandbox.openprocurement.org",
api_version='2.0',
params=None,
resource='tenders'):
super(TendersClient, self).__init__(key, host_url, api_version, resource, params)
###########################################################################
# GET ITEMS LIST API METHODS
###########################################################################
@retry(stop_max_attempt_number=5)
def get_tenders(self, params={}, feed='changes'):
params['feed'] = feed
try:
self._update_params(params)
response = self.get(
self.prefix_path,
params_dict=self.params)
if response.status_int == 200:
tender_list = munchify(loads(response.body_string()))
self._update_params(tender_list.next_page)
return tender_list.data
except ResourceNotFound:
del self.params['offset']
raise
raise InvalidResponse
def get_latest_tenders(self, date, tender_id):
iso_dt = parse_date(date)
dt = iso_dt.strftime("%Y-%m-%d")
tm = iso_dt.strftime("%H:%M:%S")
        # _get_resource_item() already returns the parsed (munchified) body or
        # raises InvalidResponse, so there is no HTTP response to re-check here.
        tender_list = self._get_resource_item(
            '{}?offset={}T{}&opt_fields=tender_id&mode=test'.format(
                self.prefix_path,
                dt,
                tm
            )
        )
        self._update_params(tender_list.next_page)
        return tender_list.data
def _get_tender_resource_list(self, tender, items_name):
return self._get_resource_item(
'{}/{}/{}'.format(self.prefix_path, tender.data.id, items_name),
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def get_questions(self, tender, params={}):
return self._get_tender_resource_list(tender, "questions")
def get_documents(self, tender, params={}):
return self._get_tender_resource_list(tender, "documents")
def get_awards_documents(self, tender, award_id, params={}):
return self._get_resource_item(
'{}/{}/awards/{}/documents'.format(self.prefix_path, tender.data.id, award_id),
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def get_qualification_documents(self, tender, qualification_id, params={}):
return self._get_resource_item(
'{}/{}/qualifications/{}/documents'.format(self.prefix_path, tender.data.id, qualification_id),
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def get_awards(self, tender, params={}):
return self._get_tender_resource_list(tender, "awards")
def get_lots(self, tender, params={}):
return self._get_tender_resource_list(tender, "lots")
###########################################################################
# CREATE ITEM API METHODS
###########################################################################
def _create_tender_resource_item(self, tender, item_obj, items_name):
return self._create_resource_item(
'{}/{}/{}'.format(self.prefix_path, tender.data.id, items_name),
item_obj,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def create_tender(self, tender):
return self._create_resource_item(self.prefix_path, tender)
def create_question(self, tender, question):
return self._create_tender_resource_item(tender, question, "questions")
def create_bid(self, tender, bid):
return self._create_tender_resource_item(tender, bid, "bids")
def create_lot(self, tender, lot):
return self._create_tender_resource_item(tender, lot, "lots")
def create_award(self, tender, award):
return self._create_tender_resource_item(tender, award, "awards")
def create_cancellation(self, tender, cancellation):
return self._create_tender_resource_item(tender, cancellation, "cancellations")
def create_complaint(self, tender, complaint):
return self._create_tender_resource_item(tender, complaint, "complaints")
def create_award_complaint(self, tender, complaint, award_id):
return self._create_resource_item(
'{}/{}/{}'.format(self.prefix_path, tender.data.id, "awards/{0}/complaints".format(award_id)),
complaint,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def create_thin_document(self, tender, document_data):
return self._create_resource_item(
'{}/{}/documents'.format(
self.prefix_path,
tender.data.id
),
document_data,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
###########################################################################
# GET ITEM API METHODS
###########################################################################
def get_tender(self, id):
return self._get_resource_item('{}/{}'.format(self.prefix_path, id))
def _get_tender_resource_item(self, tender, item_id, items_name,
access_token=""):
if access_token:
headers = {'X-Access-Token': access_token}
else:
headers = {'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
return self._get_resource_item(
'{}/{}/{}/{}'.format(self.prefix_path,
tender.data.id,
items_name,
item_id),
headers=headers
)
def get_question(self, tender, question_id):
return self._get_tender_resource_item(tender, question_id, "questions")
def get_bid(self, tender, bid_id, access_token):
return self._get_tender_resource_item(tender, bid_id, "bids",
access_token)
def get_lot(self, tender, lot_id):
return self._get_tender_resource_item(tender, lot_id, "lots")
def get_file(self, tender, url, access_token=None):
parsed_url = urlparse(url)
headers = {}
if access_token:
headers = {'X-Access-Token': access_token}
headers.update(self.headers)
response_item = self.get(parsed_url.path,
headers=headers,
params_dict=parse_qs(parsed_url.query))
if response_item.status_int == 302:
response_obj = request(response_item.headers['location'])
if response_obj.status_int == 200:
return response_obj.body_string(), \
response_obj.headers['Content-Disposition'] \
.split("; filename=")[1].strip('"')
raise InvalidResponse
def extract_credentials(self, id):
return self._get_resource_item('{}/{}/extract_credentials'.format(self.prefix_path, id))
###########################################################################
# PATCH ITEM API METHODS
###########################################################################
def _patch_tender_resource_item(self, tender, item_obj, items_name):
return self._patch_resource_item(
'{}/{}/{}/{}'.format(
self.prefix_path, tender.data.id, items_name, item_obj['data']['id']
),
payload=item_obj,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_tender(self, tender):
return self._patch_resource_item(
'{}/{}'.format(self.prefix_path, tender["data"]["id"]),
payload=tender,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_question(self, tender, question):
return self._patch_tender_resource_item(tender, question, "questions")
def patch_bid(self, tender, bid):
return self._patch_tender_resource_item(tender, bid, "bids")
def patch_bid_document(self, tender, document_data, bid_id, document_id):
return self._patch_resource_item(
'{}/{}/{}/{}/documents/{}'.format(
self.prefix_path, tender.data.id, "bids", bid_id, document_id
),
payload=document_data,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_award(self, tender, award):
return self._patch_tender_resource_item(tender, award, "awards")
def patch_award_document(self, tender, document_data, award_id, document_id):
return self._patch_resource_item(
'{}/{}/{}/{}/documents/{}'.format(
self.prefix_path, tender.data.id, "awards", award_id, document_id
),
payload=document_data,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_cancellation(self, tender, cancellation):
return self._patch_tender_resource_item(tender, cancellation, "cancellations")
def patch_cancellation_document(self, tender, cancellation, cancellation_id, cancellation_doc_id):
return self._patch_resource_item(
'{}/{}/{}/{}/documents/{}'.format(
self.prefix_path, tender.data.id, "cancellations", cancellation_id, cancellation_doc_id
),
payload=cancellation,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_complaint(self, tender, complaint):
return self._patch_tender_resource_item(tender, complaint, "complaints")
def patch_award_complaint(self, tender, complaint, award_id):
return self._patch_resource_item(
'{}/{}/awards/{}/complaints/{}'.format(
self.prefix_path, tender.data.id, award_id, complaint.data.id
),
payload=complaint,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_lot(self, tender, lot):
return self._patch_tender_resource_item(tender, lot, "lots")
def patch_document(self, tender, document):
return self._patch_tender_resource_item(tender, document, "documents")
def patch_qualification(self, tender, qualification):
return self._patch_tender_resource_item(tender, qualification, "qualifications")
def patch_contract(self, tender, contract):
return self._patch_tender_resource_item(tender, contract, "contracts")
def patch_contract_document(self, tender, document_data, contract_id, document_id):
return self._patch_resource_item(
'{}/{}/{}/{}/documents/{}'.format(
self.prefix_path, tender.data.id, "contracts", contract_id, document_id
),
payload=document_data,
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
def patch_credentials(self, id, access_token):
return self._patch_resource_item('{}/{}/credentials'.format(self.prefix_path, id),
payload={},
headers={'X-Access-Token': access_token})
###########################################################################
# UPLOAD FILE API METHODS
###########################################################################
@verify_file
def upload_document(self, file_, tender):
return self._upload_resource_file(
'{}/{}/documents'.format(
self.prefix_path,
tender.data.id
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def upload_bid_document(self, file_, tender, bid_id, doc_type="documents"):
return self._upload_resource_file(
'{}/{}/bids/{}/{}'.format(
self.prefix_path,
tender.data.id,
bid_id,
doc_type
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def update_bid_document(self, file_, tender, bid_id, document_id, doc_type="documents"):
return self._upload_resource_file(
'{}/{}/bids/{}/{}/{}'.format(
self.prefix_path,
tender.data.id,
bid_id,
doc_type,
document_id
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')},
method='put'
)
@verify_file
def upload_cancellation_document(self, file_, tender, cancellation_id):
return self._upload_resource_file(
'{}/{}/cancellations/{}/documents'.format(
self.prefix_path,
tender.data.id,
cancellation_id
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def update_cancellation_document(self, file_, tender, cancellation_id, document_id):
return self._upload_resource_file(
'{}/{}/cancellations/{}/documents/{}'.format(
self.prefix_path,
tender.data.id,
cancellation_id,
document_id
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')},
method='put'
)
@verify_file
def upload_complaint_document(self, file_, tender, complaint_id):
return self._upload_resource_file(
'{}/{}/complaints/{}/documents'.format(
self.prefix_path,
tender.data.id,
complaint_id),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def upload_award_complaint_document(self, file_, tender, award_id, complaint_id):
return self._upload_resource_file(
'{}/{}/awards/{}/complaints/{}/documents'.format(
self.prefix_path,
tender.data.id,
award_id,
complaint_id),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def upload_qualification_document(self, file_, tender, qualification_id):
return self._upload_resource_file(
'{}/{}/qualifications/{}/documents'.format(
self.prefix_path,
tender.data.id,
qualification_id
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def upload_award_document(self, file_, tender, award_id, doc_type="documents"):
return self._upload_resource_file(
'{}/{}/awards/{}/{}'.format(
self.prefix_path,
tender.data.id,
award_id,
doc_type
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
@verify_file
def upload_contract_document(self, file_, tender, contract_id, doc_type="documents"):
return self._upload_resource_file(
            '{}/{}/contracts/{}/{}'.format(
self.prefix_path,
tender.data.id,
contract_id,
doc_type
),
data={"file": file_},
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
###########################################################################
# DELETE ITEMS LIST API METHODS
###########################################################################
def delete_bid(self, tender, bid, access_token=None):
logger.info("delete_lot is deprecated. In next update this function will takes bid_id and access_token instead bid.")
if isinstance(bid, basestring):
bid_id = bid
access_token = access_token
else:
bid_id = bid.data.id
access_token = getattr(getattr(bid, 'access', ''), 'token', '')
return self._delete_resource_item(
'{}/{}/bids/{}'.format(
self.prefix_path,
tender.data.id,
bid_id
),
headers={'X-Access-Token': access_token}
)
def delete_lot(self, tender, lot):
logger.info("delete_lot is deprecated. In next update this function will takes lot_id instead lot.")
if isinstance(lot, basestring):
lot_id = lot
else:
lot_id = lot.data.id
return self._delete_resource_item(
'{}/{}/lots/{}'.format(
self.prefix_path,
tender.data.id,
lot_id
),
headers={'X-Access-Token':
getattr(getattr(tender, 'access', ''), 'token', '')}
)
###########################################################################
class Client(TendersClient):
"""client for tenders for backward compatibility"""
class TendersClientSync(TendersClient):
def sync_tenders(self, params={}, extra_headers={}):
params['feed'] = 'changes'
self.headers.update(extra_headers)
response = self.get(self.prefix_path, params_dict=params)
        if response.status_int == 200:
            tender_list = munchify(loads(response.body_string()))
            return tender_list
        raise InvalidResponse
@retry(stop_max_attempt_number=5)
def get_tender(self, id, extra_headers={}):
self.headers.update(extra_headers)
return super(TendersClientSync, self).get_tender(id)
class EDRClient(Resource):
""" Client for validate members by EDR """
def __init__(self, host_url, api_version, username, password, **kwargs):
prefix_path = '{}/api/{}'.format(host_url, api_version)
super(EDRClient, self).__init__(prefix_path,
filters=[BasicAuth(username, password)],
**kwargs)
self.headers = {"Content-Type": "application/json"}
def request(self, method, path=None, payload=None, headers=None,
params_dict=None, **params):
_headers = dict(self.headers)
_headers.update(headers or {})
try:
response = super(EDRClient, self).request(
method, path=path, payload=payload, headers=_headers,
params_dict=params_dict, **params
)
if 'Set-Cookie' in response.headers:
self.headers['Cookie'] = response.headers['Set-Cookie']
return response
except ResourceNotFound as e:
if 'Set-Cookie' in e.response.headers:
self.headers['Cookie'] = e.response.headers['Set-Cookie']
raise e
def verify_member(self, edrpou, headers=None):
response = self.request("GET", "/verify",
params_dict={'id': edrpou},
headers=headers)
if response.status_int == 200:
return munchify(loads(response.body_string()))
raise InvalidResponse
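# A minimal usage sketch, not part of the original file; the API key is a
# placeholder and the host defaults to the public sandbox:
#   client = TendersClient('api-key')
#   for tender in client.get_tenders():
#       print(tender.id, tender.dateModified)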
| openprocurement/openprocurement.client.python | openprocurement_client/client.py | Python | apache-2.0 | 26,776 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Canonical
#
# Authors:
# Didier Roche
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for the download center module using a local server"""
import apt
import os
import shutil
import stat
import subprocess
import tempfile
from time import time
from ..tools import get_data_dir, LoggedTestCase, manipulate_path_env
from unittest.mock import Mock, call, patch
import umake
from umake.network.requirements_handler import RequirementsHandler
from umake import tools
class TestRequirementsHandler(LoggedTestCase):
"""This will test the download center by sending one or more download requests"""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.handler = RequirementsHandler()
apt.apt_pkg.config.set("Dir::Cache::pkgcache", "")
apt.apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
apt.apt_pkg.config.clear("APT::Update::Post-Invoke")
apt.apt_pkg.config.clear("APT::Update::Post-Invoke-Success")
apt.apt_pkg.config.clear("DPkg::Post-Invoke")
cls.apt_package_dir = os.path.join(get_data_dir(), "apt")
cls.apt_status_dir = os.path.join(cls.apt_package_dir, "states")
def setUp(self):
super().setUp()
self.chroot_path = tempfile.mkdtemp()
# create the fake dpkg wrapper
os.makedirs(os.path.join(self.chroot_path, "usr", "bin"))
self.dpkg = os.path.join(self.chroot_path, "usr", "bin", "dpkg")
with open(self.dpkg, "w") as f:
f.write("#!/bin/sh\nfakeroot /usr/bin/dpkg --root={root} --force-not-root --force-bad-path "
"--log={root}/var/log/dpkg.log \"$@\"".format(root=self.chroot_path))
st = os.stat(self.dpkg)
os.chmod(self.dpkg, st.st_mode | stat.S_IEXEC)
# for arch cache support
tools._current_arch = None
tools._foreign_arch = None
manipulate_path_env(os.path.dirname(self.dpkg))
# apt requirements
apt_etc = os.path.join(self.chroot_path, 'etc', 'apt')
os.makedirs(apt_etc)
os.makedirs(os.path.join(self.chroot_path, 'var', 'log', 'apt'))
with open(os.path.join(apt_etc, 'sources.list'), 'w') as f:
f.write('deb file:{} /'.format(self.apt_package_dir))
# dpkg requirements
dpkg_dir = os.path.join(self.chroot_path, 'var', 'lib', 'dpkg')
os.makedirs(dpkg_dir)
os.mkdir(os.path.join(os.path.join(dpkg_dir, 'info')))
os.mkdir(os.path.join(os.path.join(dpkg_dir, 'triggers')))
os.mkdir(os.path.join(os.path.join(dpkg_dir, 'updates')))
open(os.path.join(dpkg_dir, 'status'), 'w').close()
open(os.path.join(dpkg_dir, 'available'), 'w').close()
self.dpkg_dir = dpkg_dir
cache = apt.Cache(rootdir=self.chroot_path)
apt.apt_pkg.config.set("Dir::Bin::dpkg", self.dpkg) # must be called after initializing the rootdir cache
cache.update()
cache.open()
self.handler.cache = cache
self.done_callback = Mock()
self._saved_seteuid_fn = os.seteuid
self._saved_setegid_fn = os.setegid
self._saved_geteuid_fn = os.geteuid
self._saved_getenv = os.getenv
self.user_uid, self.user_gid = (4242, 4242)
os.seteuid = Mock()
os.setegid = Mock()
os.geteuid = Mock()
os.geteuid.return_value = self.user_uid
os.getenv = Mock(side_effect=self._mock_get_env)
def tearDown(self):
# remove arch cache support
manipulate_path_env(os.path.dirname(self.dpkg), remove=True)
tools._current_arch = None
tools._foreign_arch = None
shutil.rmtree(self.chroot_path)
os.seteuid = self._saved_seteuid_fn
os.setegid = self._saved_setegid_fn
os.geteuid = self._saved_geteuid_fn
os.getenv = self._saved_getenv
super().tearDown()
def _mock_get_env(self, env, default=None):
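        """Pretend to be running under sudo: report the fake user's uid/gid
        via SUDO_UID/SUDO_GID when the effective uid is root."""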
if os.geteuid() == 0:
if env == "SUDO_UID":
return str(self.user_uid)
elif env == "SUDO_GID":
return str(self.user_gid)
return self._saved_getenv(env)
def count_number_progress_call(self, call_args_list, tag):
"""Count the number of tag in progress call and return it"""
count = 0
for call in call_args_list:
if call[0][0]['step'] == tag:
count += 1
return count
def wait_for_callback(self, mock_function_to_be_called, timeout=10):
"""wait for the callback to be called until a timeout.
Add temp files to the clean file list afterwards"""
timeout_time = time() + timeout
while not mock_function_to_be_called.called:
if time() > timeout_time:
                raise BaseException("Function not called within {} seconds".format(timeout))
def test_singleton(self):
"""Ensure we are delivering a singleton for RequirementsHandler"""
other = RequirementsHandler()
self.assertEqual(self.handler, other)
def test_install(self):
"""Install one package"""
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, ['testpackage'])
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
def test_install_multi_arch_current_arch(self):
"""We install a multi_arch package corresponding to current arch"""
multi_arch_name = "testpackage:{}".format(tools.get_current_arch())
self.handler.install_bucket([multi_arch_name], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, [multi_arch_name])
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
def test_install_perm(self):
"""When we install one package, we first switch to root"""
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
os.seteuid.assert_called_once_with(0)
os.setegid.assert_called_once_with(0)
def test_install_return_error_if_no_perm(self):
"""Return an exception when we try to install and we can't switch to root"""
os.seteuid.side_effect = PermissionError()
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertIsNotNone(self.done_callback.call_args[0][0].error)
self.assertFalse(self.handler.is_bucket_installed(["testpackage"]))
self.expect_warn_error = True
def test_install_perm_switch_back_user(self):
"""When we install one package, we switch back to user at the end"""
umake.network.requirements_handler.os.geteuid.return_value = 0
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
# we call it twice and the latest is the user id
self.assertEqual(os.seteuid.call_count, 2)
self.assertEqual(os.seteuid.call_args, call(self.user_uid))
self.assertEqual(os.setegid.call_args, call(self.user_gid))
def test_install_progress(self):
"""Install one package and get progress feedback"""
progress_callback = Mock()
self.handler.install_bucket(["testpackage"], progress_callback, self.done_callback)
self.wait_for_callback(self.done_callback)
downloading_msg = self.count_number_progress_call(progress_callback.call_args_list,
RequirementsHandler.STATUS_DOWNLOADING)
installing_msg = self.count_number_progress_call(progress_callback.call_args_list,
RequirementsHandler.STATUS_INSTALLING)
self.assertTrue(downloading_msg > 1)
self.assertTrue(installing_msg > 1)
        # the first download call is at 0% of progress. testpackage is 1 byte to download
self.assertEqual(progress_callback.call_args_list[0][0][0],
{'step': 0, 'pkg_size_download': 1, 'percentage': 0.0})
self.assertEqual(progress_callback.call_args_list[2][0][0],
{'step': 1, 'percentage': 0.0})
def test_install_multiple_packages(self):
"""Install multiple packages in one shot"""
self.handler.install_bucket(["testpackage", "testpackage0"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, ['testpackage', 'testpackage0'])
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(["testpackage", "testpackage0"]))
def test_install_multiple_packages_progress(self):
"""Install multiple packages in one shot and ensure that progress is global"""
progress_callback = Mock()
self.handler.install_bucket(["testpackage", "testpackage0"], progress_callback, self.done_callback)
self.wait_for_callback(self.done_callback)
downloading_msg = self.count_number_progress_call(progress_callback.call_args_list,
RequirementsHandler.STATUS_DOWNLOADING)
installing_msg = self.count_number_progress_call(progress_callback.call_args_list,
RequirementsHandler.STATUS_INSTALLING)
self.assertTrue(downloading_msg > 1)
self.assertTrue(installing_msg > 1)
        # the first download call is at 0% of progress. testpackage is 1 byte to download
self.assertEqual(progress_callback.call_args_list[0][0][0],
{'step': 0, 'pkg_size_download': 1, 'percentage': 0.0})
def test_install_pending(self):
"""Appending two installations and wait for results. Only the first call should have progress"""
done_callback0 = Mock()
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.handler.install_bucket(["testpackage0"], lambda x: "", done_callback0)
self.wait_for_callback(self.done_callback)
self.wait_for_callback(done_callback0)
self.assertTrue(self.handler.is_bucket_installed(["testpackage", "testpackage0"]))
def test_install_pending_order(self):
"""Installation order of pending requests are respected"""
done_callback = Mock()
done_callback.side_effect = self.done_callback
done_callback0 = Mock()
done_callback0.side_effect = self.done_callback
ordered_progress_callback = Mock()
progress_callback = Mock()
progress_callback.side_effect = ordered_progress_callback
progress_callback0 = Mock()
progress_callback0.side_effect = ordered_progress_callback
self.handler.install_bucket(["testpackage"], progress_callback, done_callback)
self.handler.install_bucket(["testpackage0"], progress_callback0, done_callback0)
self.wait_for_callback(done_callback)
self.wait_for_callback(done_callback0)
self.assertEqual(self.done_callback.call_args_list,
[call(RequirementsHandler.RequirementsResult(bucket=['testpackage'], error=None)),
call(RequirementsHandler.RequirementsResult(bucket=['testpackage0'], error=None))])
# we will get progress with 0, 1 (first bucket), 0, 1 (second bucket). So 4 progress signal status change
current_status = RequirementsHandler.STATUS_DOWNLOADING
current_status_change_count = 1
calls = ordered_progress_callback.call_args_list
for current_call in calls[1:]:
if current_call[0][0]['step'] != current_status:
current_status = current_call[0][0]['step']
current_status_change_count += 1
self.assertEqual(current_status_change_count, 4)
def test_install_pending_callback_not_mixed(self):
"""Callbacks are separated on pending requests"""
done_callback = Mock()
done_callback.side_effect = self.done_callback
done_callback0 = Mock()
done_callback0.side_effect = self.done_callback
global_progress_callback = Mock()
progress_callback = Mock()
progress_callback.side_effect = global_progress_callback
progress_callback0 = Mock()
progress_callback0.side_effect = global_progress_callback
self.handler.install_bucket(["testpackage"], progress_callback, done_callback)
self.handler.install_bucket(["testpackage0"], progress_callback0, done_callback0)
self.wait_for_callback(done_callback)
self.wait_for_callback(done_callback0)
self.assertTrue(progress_callback.call_count < global_progress_callback.call_count)
self.assertTrue(progress_callback0.call_count < global_progress_callback.call_count)
self.assertTrue(done_callback.call_count < self.done_callback.call_count)
self.assertTrue(done_callback0.call_count < self.done_callback.call_count)
def test_install_twice(self):
"""Test appending two installations and wait for results. Only the first call should have progress"""
progress_callback = Mock()
progress_second_callback = Mock()
done_callback = Mock()
self.handler.install_bucket(["testpackage"], progress_callback, done_callback)
self.handler.install_bucket(["testpackage"], progress_second_callback, self.done_callback)
self.wait_for_callback(done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
self.assertFalse(progress_second_callback.called)
def test_deps(self):
"""Installing one package, ensure the dep (even with auto_fix=False) is installed"""
self.handler.install_bucket(["testpackage1"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_installed(["testpackage1", "testpackage"]))
def test_fail(self):
"""An error is caught when asking for the impossible (installing 2 packages in conflicts)"""
self.handler.install_bucket(["testpackage", "testpackage2"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertIsNotNone(self.done_callback.call_args[0][0].error)
both_package_installed = self.handler.is_bucket_installed(["testpackage"]) and \
self.handler.is_bucket_installed(["testpackage2"])
self.assertFalse(both_package_installed)
self.expect_warn_error = True
def test_install_shadow_pkg(self):
"""We return an error if we try to install a none existing package"""
self.handler.install_bucket(["foo"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertIsNotNone(self.done_callback.call_args[0][0].error)
self.expect_warn_error = True
def test_error_in_dpkg(self):
"""An error while installing a package is caught"""
with open(self.dpkg, mode='w') as f:
f.write("#!/bin/sh\nexit 1") # Simulate an error in dpkg
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertIsNotNone(self.done_callback.call_args[0][0].error)
self.expect_warn_error = True
def test_is_installed_bucket_installed(self):
"""Install bucket should return True if a bucket is installed"""
self.handler.install_bucket(["testpackage", "testpackage1"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_installed(['testpackage', 'testpackage1']))
def test_is_installed_bucket_half_installed(self):
"""Install bucket shouldn't be considered installed if not fully installed"""
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertFalse(self.handler.is_bucket_installed(['testpackage', 'testpackage1']))
def test_is_installed_bucket_not_installed(self):
"""Install bucket should return False if a bucket is not installed"""
self.assertFalse(self.handler.is_bucket_installed(['testpackage', 'testpackage1']))
def test_is_bucket_installed_multi_arch_current_arch(self):
"""Installed bucket should return True even if contains multi-arch part with current package"""
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_installed(["testpackage:{}".format(tools.get_current_arch())]))
def test_is_bucket_installed_with_unavailable_package(self):
"""Bucket isn't installed if some package are even not in the cache"""
self.assertFalse(self.handler.is_bucket_installed(["testpackagedoesntexist"]))
def test_is_bucket_installed_with_unavailable_multiarch_package(self):
"""Bucket isn't installed if some multiarch package are even not in the cache"""
self.assertFalse(self.handler.is_bucket_installed(["testpackagedoesntexist:foo"]))
def test_is_bucket_installed_with_foreign_archs_package_not_installed(self):
"""After adding a foreign arch, test that the package is not installed and report so"""
subprocess.call([self.dpkg, "--add-architecture", "foo"])
self.handler.cache.open() # reopen the cache with the new added architecture
self.assertFalse(self.handler.is_bucket_installed(['testpackagefoo:foo']))
def test_is_bucket_uptodate_bucket_uptodate(self):
"""Up to date bucket is reported as such"""
self.handler.install_bucket(["testpackage", "testpackage1"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_uptodate(['testpackage', 'testpackage1']))
def test_is_bucket_uptodate_bucket_not_installed(self):
"""Not installed bucket is not uptodate"""
self.assertFalse(self.handler.is_bucket_uptodate(['testpackage', 'testpackage1']))
def test_is_bucket_uptodate_bucket_half_installed(self):
"""bucket shouldn't be considered up to date if not fully installed"""
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertFalse(self.handler.is_bucket_uptodate(['testpackage', 'testpackage1']))
def test_is_bucket_uptodate_multi_arch_current_arch(self):
"""Installed bucket should return as being uptodate even if contains multi-arch part with current package"""
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_uptodate(["testpackage:{}".format(tools.get_current_arch())]))
def test_is_bucket_uptodate_with_unavailable_package(self):
"""Bucket isn't uptodate if some package are even not in the cache"""
self.assertFalse(self.handler.is_bucket_uptodate(["testpackagedoesntexist"]))
def test_is_bucket_uptodate_with_unavailable_multiarch_package(self):
"""Bucket isn't uptodate if some multiarch package are even not in the cache"""
self.assertFalse(self.handler.is_bucket_uptodate(["testpackagedoesntexist:foo"]))
def test_is_bucket_uptodate_with_foreign_archs(self):
"""After adding a foreign arch, test that the package is uptodate and report so"""
subprocess.call([self.dpkg, "--add-architecture", "foo"])
self.handler.cache.open() # reopen the cache with the new added architecture
self.handler.install_bucket(["testpackagefoo:foo"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(self.handler.is_bucket_uptodate(['testpackagefoo:foo']))
def test_is_bucket_uptodate_with_foreign_archs_package_not_installed(self):
"""After adding a foreign arch, test that the package is not uptodate and report so"""
subprocess.call([self.dpkg, "--add-architecture", "foo"])
self.handler.cache.open() # reopen the cache with the new added architecture
self.assertFalse(self.handler.is_bucket_uptodate(['testpackagefoo:foo']))
def test_is_bucket_uptodate_with_possible_upgrade(self):
"""If one package of the bucket can be upgraded, tell it's not up to date"""
shutil.copy(os.path.join(self.apt_status_dir, "testpackage_installed_dpkg_status"),
os.path.join(self.dpkg_dir, "status"))
self.handler.cache.open()
self.assertFalse(self.handler.is_bucket_uptodate(["testpackage"]))
def test_is_bucket_available(self):
"""An available bucket on that platform is reported"""
self.assertTrue(self.handler.is_bucket_available(['testpackage', 'testpackage1']))
def test_is_bucket_available_multi_arch_current_arch(self):
"""We return a package is available on the current platform"""
self.assertTrue(self.handler.is_bucket_available(['testpackage:{}'.format(tools.get_current_arch())]))
def test_unavailable_bucket(self):
"""An unavailable bucket on that platform is reported"""
self.assertFalse(self.handler.is_bucket_available(['testpackage42', 'testpackage404']))
def test_is_bucket_available_foreign_archs(self):
"""After adding a foreign arch, test that the package is available on it"""
subprocess.call([self.dpkg, "--add-architecture", "foo"])
self.handler.cache.open() # reopen the cache with the new added architecture
self.assertTrue(self.handler.is_bucket_available(['testpackagefoo:foo', 'testpackage1']))
def test_is_bucket_unavailable_with_foreign_archs(self):
"""After adding a foreign arch, test that the package is unavailable and report so"""
subprocess.call([self.dpkg, "--add-architecture", "foo"])
self.handler.cache.open() # reopen the cache with the new added architecture
self.assertFalse(self.handler.is_bucket_available(['testpackagebar:foo', 'testpackage1']))
def test_bucket_unavailable_but_foreign_archs_no_added(self):
"""Bucket is set as available when foreign arch not added"""
self.assertTrue(self.handler.is_bucket_available(['testpackagefoo:foo', 'testpackage1']))
def test_bucket_unavailable_foreign_archs_no_added_another_package_not_available(self):
"""Bucket is set as unavailable when foreign arch not added, but another package on current arch is
unavailable"""
self.assertFalse(self.handler.is_bucket_available(['testpackagefoo:foo', 'testpackage123']))
def test_apt_cache_not_ready(self):
"""When the first apt.Cache() access tells it's not ready, we wait and recover"""
origin_open = self.handler.cache.open
raise_returned = False
def cache_call(*args, **kwargs):
nonlocal raise_returned
if raise_returned:
return origin_open()
else:
raise_returned = True
raise SystemError
with patch.object(self.handler.cache, 'open', side_effect=cache_call) as openaptcache_mock:
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(openaptcache_mock.call_count, 2)
def test_upgrade(self):
"""Upgrade one package already installed"""
shutil.copy(os.path.join(self.apt_status_dir, "testpackage_installed_dpkg_status"),
os.path.join(self.dpkg_dir, "status"))
self.handler.cache.open()
self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
self.assertEqual(self.handler.cache["testpackage"].installed.version, "0.0.0")
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, ['testpackage'])
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
self.assertEqual(self.handler.cache["testpackage"].installed.version, "0.0.1")
def test_one_install_one_upgrade(self):
"""Install and Upgrade one package in the same bucket"""
shutil.copy(os.path.join(self.apt_status_dir, "testpackage_installed_dpkg_status"),
os.path.join(self.dpkg_dir, "status"))
self.handler.cache.open()
self.assertTrue(self.handler.is_bucket_installed(["testpackage"]))
self.assertEqual(self.handler.cache["testpackage"].installed.version, "0.0.0")
self.assertFalse(self.handler.is_bucket_installed(["testpackage0"]))
self.handler.install_bucket(["testpackage", "testpackage0"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, ['testpackage', 'testpackage0'])
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(["testpackage", "testpackage0"]))
self.assertEqual(self.handler.cache["testpackage"].installed.version, "0.0.1")
def test_install_with_foreign_foreign_arch_added(self):
"""Install packages with a foreign arch added"""
subprocess.call([self.dpkg, "--add-architecture", "foo"])
self.handler.cache.open() # reopen the cache with the new added architecture
bucket = ["testpackagefoo:foo", "testpackage1"]
with patch("umake.network.requirements_handler.subprocess") as subprocess_mock:
self.handler.install_bucket(bucket, lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertFalse(subprocess_mock.call.called)
self.assertEqual(self.done_callback.call_args[0][0].bucket, bucket)
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(bucket))
def test_install_with_foreign_foreign_arch_not_added(self):
"""Install packages with a foreign arch, while the foreign arch wasn't added"""
bucket = ["testpackagefoo:foo", "testpackage1"]
self.handler.install_bucket(bucket, lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, bucket)
self.assertIsNone(self.done_callback.call_args[0][0].error)
self.assertTrue(self.handler.is_bucket_installed(bucket))
def test_install_with_foreign_foreign_arch_add_fails(self):
"""Install packages with a foreign arch, where adding a foreign arch fails"""
bucket = ["testpackagefoo:foo", "testpackage1"]
with patch("umake.network.requirements_handler.subprocess") as subprocess_mock:
subprocess_mock.call.return_value = 1
self.handler.install_bucket(bucket, lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertTrue(subprocess_mock.call.called)
self.assertFalse(self.handler.is_bucket_installed(bucket))
self.expect_warn_error = True
def test_cant_change_seteuid(self):
"""Not being able to change the euid to root returns an error"""
os.seteuid.side_effect = PermissionError()
self.handler.install_bucket(["testpackage"], lambda x: "", self.done_callback)
self.wait_for_callback(self.done_callback)
self.assertEqual(self.done_callback.call_args[0][0].bucket, ['testpackage'])
self.assertIsNotNone(self.done_callback.call_args[0][0].error)
self.assertFalse(self.handler.is_bucket_installed(["testpackage"]))
self.expect_warn_error = True
| mbkulik/ubuntu-make | tests/small/test_requirements_handler.py | Python | gpl-3.0 | 29,261 |
#! /usr/bin/env python2.5
# -*- coding: utf-8 -*-
__author__ = ('Julian Togelius, [email protected]',
'Justin S Bayer, [email protected]')
__version__ = '$Id$'
import scipy
from pybrain.rl.learners.blackboxoptimizers.blackboxoptimizer import BlackBoxOptimizer
class Particle(object):
def _setFitness(self, value):
self._fitness = value
        if value > self.bestFitness:
            self.bestFitness = value
            # Copy: move() updates self.position in place, which would
            # otherwise silently mutate bestPosition through aliasing.
            self.bestPosition = self.position.copy()
def _getFitness(self):
return self._fitness
fitness = property(_getFitness, _setFitness)
def __init__(self, start):
"""Initialize a Particle at the given start vector."""
self.dim = scipy.size(start)
self.position = start
self.velocity = scipy.zeros(scipy.size(start))
self.bestPosition = scipy.zeros(scipy.size(start))
self._fitness = None
self.bestFitness = -scipy.inf
def move(self):
self.position += self.velocity
def fullyConnected(lst):
return dict((i, lst) for i in lst)
def ring(lst):
leftist = lst[1:] + lst[0:1]
rightist = lst[-1:] + lst[:-1]
return dict((i, (j, k)) for i, j, k in zip(lst, leftist, rightist))
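# Quick illustration of the two neighbourhood topologies (hedged example;
# plain ints stand in here for Particle objects):
#
#     >>> fullyConnected([1, 2, 3])
#     {1: [1, 2, 3], 2: [1, 2, 3], 3: [1, 2, 3]}
#     >>> ring([1, 2, 3])
#     {1: (2, 3), 2: (3, 1), 3: (1, 2)}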
class ParticleSwarmOptimizer(BlackBoxOptimizer):
def __init__(self, evaluator, evaluable, size, boundaries=None,
memory=2.0, sociality=2.0, inertia=0.9,
neighbourfunction=fullyConnected,
*args, **kwargs):
"""Initialize a ParticleSwarmOptimizer with `size` particles.
`boundaries` should be a list of (min, max) pairs with the length of the
dimensionality of the vector to be optimized. Particles will be
initialized with a position drawn uniformly in that interval.
`memory` indicates how much the velocity of a particle is affected by
its previous best position.
`sociality` indicates how much the velocity of a particle is affected by
its neighbours best position.
`inertia` is a damping factor.
"""
super(ParticleSwarmOptimizer, self).__init__(\
evaluator, evaluable, *args, **kwargs)
self.dim = scipy.size(evaluable)
self.inertia = inertia
self.sociality = sociality
self.memory = memory
self.neighbourfunction = neighbourfunction
if boundaries is None:
maxs = scipy.array([10] * self.dim)
mins = scipy.array([-10] * self.dim)
else:
mins = scipy.array([min_ for min_, max_ in boundaries])
maxs = scipy.array([max_ for min_, max_ in boundaries])
self.particles = []
for _ in xrange(size):
startingPosition = scipy.random.random(self.dim)
startingPosition *= (maxs - mins)
startingPosition += mins
self.particles.append(Particle(startingPosition))
# Global neighborhood
# TODO: do some better neighborhoods later
self.neighbours = self.neighbourfunction(self.particles)
def best(self, particlelist):
"""Return the particle with the best fitness from a list of particles.
"""
picker = min if self.minimize else max
return picker(particlelist, key=lambda p: p.fitness)
def _learnStep(self):
for particle in self.particles:
particle.fitness = self.evaluator(particle.position)
# Update the best solutions found so far.
better = False
if self.minimize:
if particle.fitness < self.bestEvaluation:
better = True
else:
if particle.fitness > self.bestEvaluation:
better = True
if better:
self.bestEvaluable = particle.position
self.bestEvaluation = particle.fitness
for particle in self.particles:
bestPosition = self.best(self.neighbours[particle]).position
diff_social = self.sociality \
* scipy.random.random() \
* (bestPosition - particle.position)
diff_memory = self.memory \
* scipy.random.random() \
* (particle.bestPosition - particle.position)
particle.velocity *= self.inertia
particle.velocity += diff_memory + diff_social
particle.move()
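# Hedged usage sketch (not part of the original module): assuming the
# BlackBoxOptimizer base class stores the evaluator/evaluable pair and
# initialises `minimize`, `bestEvaluable` and `bestEvaluation` the way
# pybrain's other optimizers do, a swarm could be driven like this.
# Note that particle-level bests always treat larger fitness as better,
# so return negated values from the evaluator when minimising:
#
#     def neg_sphere(x):
#         return -scipy.sum(x ** 2)
#
#     pso = ParticleSwarmOptimizer(neg_sphere, scipy.ones(5), size=20,
#                                  boundaries=[(-5.0, 5.0)] * 5)
#     for _ in xrange(100):
#         pso._learnStep()
#     print pso.bestEvaluable, pso.bestEvaluation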
| daanwierstra/pybrain | pybrain/rl/learners/blackboxoptimizers/pso.py | Python | bsd-3-clause | 4,601 |
import os
import datetime
import traceback
import gzip
import sys
import subprocess
import yaml
from yaml.scanner import ScannerError
import warnings
import socket
from collections import deque
from copy import deepcopy
import numpy as np
import Bio
import ensembler
import ensembler.version
from ensembler.core import mpistate, logger, ManualOverrides
import simtk.unit as unit
import simtk.openmm as openmm
import simtk.openmm.app as app
import simtk.openmm.version
def refine_implicit_md(
openmm_platform=None, gpupn=1, process_only_these_targets=None,
process_only_these_templates=None, model_seqid_cutoff=None,
write_trajectory=False,
include_disulfide_bonds=False,
custom_residue_variants=None,
ff='amber99sbildn',
implicit_water_model='amber99_obc',
sim_length=100.0 * unit.picoseconds,
timestep=2.0 * unit.femtoseconds, # timestep
temperature=300.0 * unit.kelvin, # simulation temperature
collision_rate=20.0 / unit.picoseconds, # Langevin collision rate
cutoff=None, # nonbonded cutoff
minimization_tolerance=10.0 * unit.kilojoules_per_mole / unit.nanometer,
minimization_steps=20,
nsteps_per_iteration=500,
ph=None,
retry_failed_runs=False,
cpu_platform_threads=1,
loglevel=None):
# TODO - refactor
"""Run MD refinement in implicit solvent.
MPI-enabled.
"""
ensembler.utils.set_loglevel(loglevel)
gpuid = mpistate.rank % gpupn
manual_overrides = ManualOverrides()
if ph is None:
if manual_overrides.refinement.ph is not None:
ph = manual_overrides.refinement.ph
else:
ph = 7.0
if custom_residue_variants is None:
custom_residue_variants = deepcopy(
manual_overrides.refinement.custom_residue_variants_by_targetid
)
if (sim_length / timestep) < nsteps_per_iteration:
nsteps_per_iteration = int(sim_length / timestep)
niterations = int((sim_length / timestep) / nsteps_per_iteration)
models_dir = os.path.abspath(ensembler.core.default_project_dirnames.models)
targets, templates_resolved_seq = ensembler.core.get_targets_and_templates()
if process_only_these_templates:
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
else:
selected_template_indices = range(len(templates_resolved_seq))
if not openmm_platform:
openmm_platform = auto_select_openmm_platform()
if openmm_platform == 'CPU':
platform_properties = {'CpuThreads': str(cpu_platform_threads)}
else:
platform_properties = {}
ff_files = [ff+'.xml', implicit_water_model+'.xml']
forcefield = app.ForceField(*ff_files)
kB = unit.MOLAR_GAS_CONSTANT_R
kT = kB * temperature
def simulate_implicit_md():
logger.debug("Reading model...")
with gzip.open(model_filename) as model_file:
pdb = app.PDBFile(model_file)
# Set up Platform
platform = openmm.Platform.getPlatformByName(openmm_platform)
if 'CUDA_VISIBLE_DEVICES' not in os.environ:
# Set GPU id.
if openmm_platform == 'CUDA':
platform.setPropertyDefaultValue('CudaDeviceIndex', '%d' % gpuid)
elif openmm_platform == 'OpenCL':
platform.setPropertyDefaultValue('OpenCLDeviceIndex', '%d' % gpuid)
# Construct Modeller object with same topology as ref structure
# (necessary to keep disulfide bonds consistent)
modeller = app.Modeller(reference_topology, pdb.positions)
# set_openmm_topology_bonds_from_atom_indices(modeller.topology, reference_bonds)
# Add missing protons.
modeller.addHydrogens(forcefield, pH=ph, variants=reference_variants)
topology = modeller.getTopology()
positions = modeller.getPositions()
logger.debug("Constructing System object...")
if cutoff is None:
system = forcefield.createSystem(topology, nonbondedMethod=app.NoCutoff, constraints=app.HBonds)
else:
system = forcefield.createSystem(topology, nonbondedMethod=app.CutoffNonPeriodic, nonbondedCutoff=cutoff, constraints=app.HBonds)
logger.debug("Creating Context...")
integrator = openmm.LangevinIntegrator(temperature, collision_rate, timestep)
context = openmm.Context(system, integrator, platform, platform_properties)
context.setPositions(positions)
logger.debug("Minimizing structure...")
openmm.LocalEnergyMinimizer.minimize(context, minimization_tolerance, minimization_steps)
if write_trajectory:
# Open trajectory for writing.
logger.debug("Opening trajectory for writing...")
trajectory_filename = os.path.join(model_dir, 'implicit-trajectory.pdb.gz')
trajectory_outfile = gzip.open(trajectory_filename, 'w')
app.PDBFile.writeHeader(topology, file=trajectory_outfile)
# Open energy trajectory for writing
energy_filename = os.path.join(model_dir, 'implicit-energies.txt')
energy_outfile = open(energy_filename, 'w')
energy_outfile.write('# iteration | simulation time (ps) | potential_energy (kT) | kinetic_energy (kT) | ns per day\n')
logger.debug("Running dynamics...")
import time
initial_time = time.time()
for iteration in range(niterations):
# integrate dynamics
integrator.step(nsteps_per_iteration)
# get current state
state = context.getState(getEnergy=True, getPositions=True)
simulation_time = state.getTime()
potential_energy = state.getPotentialEnergy()
kinetic_energy = state.getKineticEnergy()
final_time = time.time()
elapsed_time = (final_time - initial_time) * unit.seconds
ns_per_day = (simulation_time / elapsed_time) / (unit.nanoseconds / unit.day)
logger.debug(
" %8.1f ps : potential %8.3f kT | kinetic %8.3f kT | %.3f ns/day | %.3f s remain"
% (
simulation_time / unit.picoseconds, potential_energy / kT, kinetic_energy / kT,
ns_per_day,
elapsed_time * (niterations-iteration-1) / (iteration+1) / unit.seconds
)
)
# Check energies are still finite.
if np.isnan(potential_energy/kT) or np.isnan(kinetic_energy/kT):
raise Exception("Potential or kinetic energies are nan.")
if write_trajectory:
app.PDBFile.writeModel(topology, state.getPositions(), file=trajectory_outfile, modelIndex=iteration)
# write data
energy_outfile.write(" %8d %8.1f %8.3f %8.3f %.3f\n" % (iteration, simulation_time / unit.picoseconds, potential_energy / kT, kinetic_energy / kT, ns_per_day))
energy_outfile.flush()
if write_trajectory:
app.PDBFile.writeFooter(topology, file=trajectory_outfile)
trajectory_outfile.close()
energy_outfile.close()
# Write final PDB file.
pdb_outfile = gzip.open(pdb_filename, 'w')
app.PDBFile.writeHeader(topology, file=pdb_outfile)
app.PDBFile.writeFile(topology, state.getPositions(), file=pdb_outfile)
app.PDBFile.writeFooter(topology, file=pdb_outfile)
pdb_outfile.close()
for target in targets:
if process_only_these_targets and (target.id not in process_only_these_targets):
continue
models_target_dir = os.path.join(models_dir, target.id)
if mpistate.rank == 0:
target_starttime = datetime.datetime.utcnow()
if not os.path.exists(models_target_dir):
continue
mpistate.comm.Barrier()
# ========
# Determine topology (including protonation state) to use throughout
# ========
reference_model_id = get_highest_seqid_existing_model(models_target_dir=models_target_dir)
if reference_model_id is None:
continue
reference_model_path = os.path.join(models_target_dir, reference_model_id, 'model.pdb.gz')
with gzip.open(reference_model_path) as reference_pdb_file:
reference_pdb = app.PDBFile(reference_pdb_file)
logger.debug("Using %s as highest identity model" % (reference_model_id))
if not include_disulfide_bonds:
remove_disulfide_bonds_from_topology(reference_pdb.topology)
# Build topology for reference model
modeller = app.Modeller(reference_pdb.topology, reference_pdb.positions)
reference_topology = modeller.topology
reference_variants = modeller.addHydrogens(forcefield, pH=ph)
if target.id in custom_residue_variants:
apply_custom_residue_variants(reference_variants, custom_residue_variants[target.id])
logger.debug("Reference variants extracted:")
if reference_variants is not None:
for (residue_index, residue) in enumerate(reference_variants):
if residue is not None:
logger.debug("%8d %s" % (residue_index+1, residue))
logger.debug("")
else:
logger.debug(reference_variants)
if model_seqid_cutoff:
process_only_these_templates = ensembler.core.select_templates_by_seqid_cutoff(target.id, seqid_cutoff=model_seqid_cutoff)
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
ntemplates_selected = len(selected_template_indices)
for template_index in range(mpistate.rank, ntemplates_selected, mpistate.size):
template = templates_resolved_seq[selected_template_indices[template_index]]
model_dir = os.path.join(models_target_dir, template.id)
if not os.path.exists(model_dir): continue
# Only simulate models that are unique following filtering by clustering.
unique_by_clustering = os.path.exists(os.path.join(model_dir, 'unique_by_clustering'))
if not unique_by_clustering: continue
# Pass if this simulation has already been run.
log_filepath = os.path.join(model_dir, 'implicit-log.yaml')
if os.path.exists(log_filepath):
with open(log_filepath) as log_file:
log_data = yaml.load(log_file, Loader=ensembler.core.YamlLoader)
if log_data.get('successful') is True:
continue
if log_data.get('finished') is True and (retry_failed_runs is False and log_data.get('successful') is False):
continue
# Check to make sure the initial model file is present.
model_filename = os.path.join(model_dir, 'model.pdb.gz')
if not os.path.exists(model_filename):
logger.debug('model.pdb.gz not present: target %s template %s rank %d gpuid %d' % (target.id, template.id, mpistate.rank, gpuid))
continue
pdb_filename = os.path.join(model_dir, 'implicit-refined.pdb.gz')
logger.info("-------------------------------------------------------------------------")
logger.info("Simulating %s => %s in implicit solvent for %.1f ps (MPI rank: %d, GPU ID: %d)" % (target.id, template.id, niterations * nsteps_per_iteration * timestep / unit.picoseconds, mpistate.rank, gpuid))
logger.info("-------------------------------------------------------------------------")
# Open log file
log_data = {
'mpi_rank': mpistate.rank,
'gpuid': gpuid if 'CUDA_VISIBLE_DEVICES' not in os.environ else os.environ['CUDA_VISIBLE_DEVICES'],
'openmm_platform': openmm_platform,
'finished': False,
'sim_length': str(sim_length),
'timestep': str(timestep),
'temperature': str(temperature),
'ph': ph,
}
log_file = ensembler.core.LogFile(log_filepath)
log_file.log(new_log_data=log_data)
try:
start = datetime.datetime.utcnow()
simulate_implicit_md()
timing = ensembler.core.strf_timedelta(datetime.datetime.utcnow() - start)
log_data = {
'finished': True,
'timing': timing,
'successful': True,
}
log_file.log(new_log_data=log_data)
except Exception as e:
trbk = traceback.format_exc()
warnings.warn(
'= ERROR start: MPI rank {0} hostname {1} gpuid {2} =\n{3}\n{4}\n= ERROR end: MPI rank {0} hostname {1} gpuid {2}'.format(
mpistate.rank, socket.gethostname(), gpuid, e, trbk
)
)
timing = ensembler.core.strf_timedelta(datetime.datetime.utcnow() - start)
log_data = {
'exception': e,
'traceback': ensembler.core.literal_str(trbk),
'timing': timing,
'finished': True,
'successful': False,
}
log_file.log(new_log_data=log_data)
logger.debug('Finished template loop: rank %d' % mpistate.rank)
mpistate.comm.Barrier()
if mpistate.rank == 0:
project_metadata = ensembler.core.ProjectMetadata(project_stage='refine_implicit_md', target_id=target.id)
datestamp = ensembler.core.get_utcnow_formatted()
nsuccessful_refinements = subprocess.check_output(['find', models_target_dir, '-name', 'implicit-refined.pdb.gz']).count('\n')
target_timedelta = datetime.datetime.utcnow() - target_starttime
metadata = {
'target_id': target.id,
'datestamp': datestamp,
'timing': ensembler.core.strf_timedelta(target_timedelta),
'openmm_platform': openmm_platform,
'process_only_these_targets': process_only_these_targets,
'process_only_these_templates': process_only_these_templates,
'model_seqid_cutoff': model_seqid_cutoff,
'write_trajectory': write_trajectory,
'include_disulfide_bonds': include_disulfide_bonds,
'custom_residue_variants': custom_residue_variants,
'ff': ff,
'implicit_water_model': implicit_water_model,
'sim_length': str(sim_length),
'timestep': str(timestep),
'temperature': str(temperature),
'collision_rate': str(collision_rate),
'cutoff': str(cutoff),
'nsteps_per_iteration': nsteps_per_iteration,
'ph': ph,
'nsuccessful_refinements': nsuccessful_refinements,
'python_version': sys.version.split('|')[0].strip(),
'python_full_version': ensembler.core.literal_str(sys.version),
'ensembler_version': ensembler.version.short_version,
'ensembler_commit': ensembler.version.git_revision,
'biopython_version': Bio.__version__,
'openmm_version': simtk.openmm.version.short_version,
'openmm_commit': simtk.openmm.version.git_revision,
}
project_metadata.add_data(metadata)
project_metadata.write()
mpistate.comm.Barrier()
mpistate.comm.Barrier()
if mpistate.rank == 0:
logger.info('Done.')
def auto_select_openmm_platform(available_platform_names=None):
if available_platform_names is None:
available_platform_names = ['CUDA', 'OpenCL', 'CPU', 'Reference']
for platform_name in available_platform_names:
try:
platform = openmm.Platform.getPlatformByName(platform_name)
if type(platform) == openmm.Platform:
logger.info('Auto-selected OpenMM platform: %s' % platform_name)
return platform_name
except Exception:
continue
raise Exception('No OpenMM platform found')
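# Hedged usage note: the call below would pick the first platform name that
# OpenMM can actually instantiate on this machine (the result depends on the
# local OpenMM build, so 'CPU' here is only illustrative):
#
#     >>> auto_select_openmm_platform(['CPU', 'Reference'])
#     'CPU'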
def get_highest_seqid_existing_model(targetid=None, models_target_dir=None):
"""
Parameters
----------
targetid: str
models_target_dir: str
Returns
-------
reference_model_id: str
e.g. 'FAK1_HUMAN_D0_4KAB_B'
"""
if not models_target_dir and targetid:
models_target_dir = os.path.join(ensembler.core.default_project_dirnames.models, targetid)
seqids_filepath = os.path.join(models_target_dir, 'sequence-identities.txt')
if not os.path.exists(seqids_filepath):
warnings.warn('ERROR: sequence-identities.txt file not found at path %s' % seqids_filepath)
return None
with open(seqids_filepath, 'r') as seqids_file:
seqids_data = [line.split() for line in seqids_file.readlines()]
# Find highest sequence identity model - topology will be used for all models
for seqid_data in seqids_data:
reference_model_id, reference_identity = seqid_data
reference_pdb_filepath = os.path.join(models_target_dir, reference_model_id, 'model.pdb.gz')
if os.path.exists(reference_pdb_filepath):
return reference_model_id
    warnings.warn('ERROR: no reference PDB model found under %s' % models_target_dir)
return None
def remove_disulfide_bonds_from_topology(topology):
"""Should work with topology object from OpenMM or mdtraj.
Parameters
----------
topology: simtk.openmm.app.Topology or mdtraj.Topology
"""
remove_bond_indices = []
for b, bond in enumerate(topology._bonds):
atom0, atom1 = bond
if (
atom0.residue.name == 'CYS' and atom1.residue.name == 'CYS'
and (atom0.residue.index != atom1.residue.index)
            and (atom0.name == 'SG' and atom1.name == 'SG')
):
remove_bond_indices.append(b)
    # Pop from the end so earlier indices remain valid while bonds are removed.
    for b in reversed(remove_bond_indices):
        topology._bonds.pop(b)
def apply_custom_residue_variants(variants, custom_variants_dict):
"""
Applies custom residue names to a list of residue names.
Acts on `variants` list in-place.
Parameters
----------
variants: list of str
typically generated from openmm.app.modeller.addHydrogens
custom_variants_dict: dict
keyed by 0-based residue index. Values should be residue name string.
e.g. {35: 'HID'}
"""
for residue_index in custom_variants_dict:
if residue_index >= len(variants):
raise Exception(
'Custom residue variant index ({}: \'{}\') out of range of variants (len: {})'.format(
residue_index, custom_variants_dict[residue_index], len(variants)
)
)
variants[residue_index] = custom_variants_dict[residue_index]
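# Hedged mini-example: `variants` as produced by Modeller.addHydrogens, with
# the second residue (0-based index 1) forced to the HID protonation state:
#
#     >>> variants = [None, 'HIE', None]
#     >>> apply_custom_residue_variants(variants, {1: 'HID'})
#     >>> variants
#     [None, 'HID', None]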
def solvate_models(process_only_these_targets=None, process_only_these_templates=None,
model_seqid_cutoff=None,
ff='amber99sbildn',
water_model='tip3p',
verbose=False,
padding=None):
"""Solvate models which have been subjected to MD refinement with implicit solvent.
MPI-enabled.
"""
if padding is None:
padding = 10.0 * unit.angstroms
elif type(padding) is float:
padding = padding * unit.angstroms
else:
raise Exception('padding must be passed as a float (in Angstroms)')
models_dir = os.path.abspath(ensembler.core.default_project_dirnames.models)
targets, templates_resolved_seq = ensembler.core.get_targets_and_templates()
if process_only_these_templates:
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
else:
selected_template_indices = range(len(templates_resolved_seq))
ff_files = [ff+'.xml', water_model+'.xml']
forcefield = app.ForceField(*ff_files)
for target in targets:
if process_only_these_targets and (target.id not in process_only_these_targets): continue
models_target_dir = os.path.join(models_dir, target.id)
if not os.path.exists(models_target_dir): continue
if mpistate.rank == 0:
target_starttime = datetime.datetime.utcnow()
if model_seqid_cutoff:
process_only_these_templates = ensembler.core.select_templates_by_seqid_cutoff(target.id, seqid_cutoff=model_seqid_cutoff)
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
ntemplates_selected = len(selected_template_indices)
for template_index in range(mpistate.rank, ntemplates_selected, mpistate.size):
template = templates_resolved_seq[selected_template_indices[template_index]]
model_dir = os.path.join(models_target_dir, template.id)
if not os.path.exists(model_dir): continue
model_filename = os.path.join(model_dir, 'implicit-refined.pdb.gz')
if not os.path.exists(model_filename): continue
print("-------------------------------------------------------------------------")
print("Solvating %s => %s in explicit solvent" % (target.id, template.id))
print("-------------------------------------------------------------------------")
# Pass if solvation has already been run for this model.
nwaters_filename = os.path.join(model_dir, 'nwaters.txt')
if os.path.exists(nwaters_filename):
continue
try:
if verbose: print("Reading model...")
with gzip.open(model_filename) as model_file:
pdb = app.PDBFile(model_file)
# Count initial atoms.
natoms_initial = len(pdb.positions)
# Add solvent
if verbose: print("Solvating model...")
modeller = app.Modeller(pdb.topology, pdb.positions)
            modeller.addSolvent(forcefield, model=water_model, padding=padding)
positions = modeller.getPositions()
# Get number of particles per water molecule by inspecting the last residue in the topology
resi_generator = modeller.topology.residues()
resi_deque = deque(resi_generator, maxlen=1)
last_resi = resi_deque.pop()
nparticles_per_water = len([atom for atom in last_resi.atoms()])
# Count final atoms.
natoms_final = len(positions)
nwaters = (natoms_final - natoms_initial) / nparticles_per_water
if verbose: print("Solvated model contains %d waters" % nwaters)
# Record waters.
with open(nwaters_filename, 'w') as nwaters_file:
nwaters_file.write('%d\n' % nwaters)
except Exception as e:
reject_file_path = os.path.join(model_dir, 'solvation-rejected.txt')
exception_text = '%r' % e
trbk = traceback.format_exc()
with open(reject_file_path, 'w') as reject_file:
reject_file.write(exception_text + '\n')
reject_file.write(trbk + '\n')
if mpistate.rank == 0:
project_metadata = ensembler.core.ProjectMetadata(project_stage='solvate_models', target_id=target.id)
datestamp = ensembler.core.get_utcnow_formatted()
target_timedelta = datetime.datetime.utcnow() - target_starttime
metadata = {
'target_id': target.id,
'datestamp': datestamp,
'model_seqid_cutoff': model_seqid_cutoff,
'process_only_these_targets': process_only_these_targets,
'process_only_these_templates': process_only_these_templates,
'python_version': sys.version.split('|')[0].strip(),
'python_full_version': ensembler.core.literal_str(sys.version),
'ensembler_version': ensembler.version.short_version,
'ensembler_commit': ensembler.version.git_revision,
'biopython_version': Bio.__version__,
'openmm_version': simtk.openmm.version.short_version,
'openmm_commit': simtk.openmm.version.git_revision,
'timing': ensembler.core.strf_timedelta(target_timedelta),
}
project_metadata.add_data(metadata)
project_metadata.write()
mpistate.comm.Barrier()
mpistate.comm.Barrier()
if mpistate.rank == 0:
print('Done.')
def determine_nwaters(process_only_these_targets=None,
process_only_these_templates=None, model_seqid_cutoff=None,
verbose=False,
select_at_percentile=None):
'''Determine distribution of nwaters, and select the value at a certain percentile.
    If not user-specified, the percentile is set to 100 if there are fewer than 10 templates, otherwise it is set to 68.
'''
# Run serially
if mpistate.rank == 0:
models_dir = os.path.abspath(ensembler.core.default_project_dirnames.models)
targets, templates_resolved_seq = ensembler.core.get_targets_and_templates()
if process_only_these_templates:
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
else:
selected_template_indices = range(len(templates_resolved_seq))
for target in targets:
# Process only specified targets if directed.
if process_only_these_targets and (target.id not in process_only_these_targets): continue
models_target_dir = os.path.join(models_dir, target.id)
if not os.path.exists(models_target_dir): continue
if model_seqid_cutoff:
process_only_these_templates = ensembler.core.select_templates_by_seqid_cutoff(target.id, seqid_cutoff=model_seqid_cutoff)
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
ntemplates_selected = len(selected_template_indices)
if not select_at_percentile:
select_at_percentile = 100 if ntemplates_selected < 10 else 68
if verbose: print("Determining number of waters in each system from target '%s'..." % target.id)
nwaters_list = []
for template_index in range(ntemplates_selected):
template = templates_resolved_seq[selected_template_indices[template_index]]
if process_only_these_templates and template.id not in process_only_these_templates:
continue
model_dir = os.path.join(models_target_dir, template.id)
if not os.path.exists(model_dir): continue
try:
nwaters_filename = os.path.join(model_dir, 'nwaters.txt')
with open(nwaters_filename, 'r') as nwaters_file:
firstline = nwaters_file.readline()
nwaters = int(firstline)
nwaters_list.append(nwaters)
except Exception:
pass
nwaters_array = np.array(nwaters_list)
nwaters_array.sort()
nwaters_list_filename = os.path.join(models_target_dir, 'nwaters-list.txt')
with open(nwaters_list_filename, 'w') as nwaters_list_file:
for nwaters in nwaters_array:
nwaters_list_file.write('%12d\n' % nwaters)
# display statistics
index_selected = int((len(nwaters_array) - 1) * (float(select_at_percentile) / 100.0))
index68 = int((len(nwaters_array) - 1) * 0.68)
index95 = int((len(nwaters_array) - 1) * 0.95)
if len(nwaters_array) > 0:
logger.info('Number of waters in solvated models (target: %s): min = %d, max = %d, '
'mean = %.1f, 68%% = %.0f, 95%% = %.0f, chosen_percentile (%d%%) = %.0f' %
(
target.id,
nwaters_array.min(),
nwaters_array.max(),
nwaters_array.mean(),
nwaters_array[index68],
nwaters_array[index95],
select_at_percentile,
nwaters_array[index_selected]
)
)
filename = os.path.join(models_target_dir, 'nwaters-max.txt')
with open(filename, 'w') as outfile:
outfile.write('%d\n' % nwaters_array.max())
filename = os.path.join(models_target_dir, 'nwaters-use.txt')
with open(filename, 'w') as outfile:
outfile.write('%d\n' % nwaters_array[index_selected])
else:
logger.info('No nwaters information found.')
project_metadata = ensembler.core.ProjectMetadata(project_stage='determine_nwaters', target_id=target.id)
datestamp = ensembler.core.get_utcnow_formatted()
metadata = {
'target_id': target.id,
'datestamp': datestamp,
'model_seqid_cutoff': model_seqid_cutoff,
'select_at_percentile': select_at_percentile,
'process_only_these_targets': process_only_these_targets,
'process_only_these_templates': process_only_these_templates,
'python_version': sys.version.split('|')[0].strip(),
'python_full_version': ensembler.core.literal_str(sys.version),
'ensembler_version': ensembler.version.short_version,
'ensembler_commit': ensembler.version.git_revision,
'biopython_version': Bio.__version__,
}
project_metadata.add_data(metadata)
project_metadata.write()
mpistate.comm.Barrier()
mpistate.comm.Barrier()
if mpistate.rank == 0:
print('Done.')
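# The percentile selection above maps the requested percentile onto an index
# into the sorted nwaters array. Hedged mini-example with made-up counts:
#
#     >>> nwaters_array = np.array([4200, 4350, 4500, 6100])
#     >>> index68 = int((len(nwaters_array) - 1) * 0.68)   # -> 2
#     >>> nwaters_array[index68]
#     4500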
def refine_explicit_md(
openmm_platform=None, gpupn=1, process_only_these_targets=None,
process_only_these_templates=None, model_seqid_cutoff=None,
verbose=False, write_trajectory=False,
include_disulfide_bonds=False,
ff='amber99sbildn',
water_model='tip3p',
nonbondedMethod = app.PME, # nonbonded method
cutoff = 0.9*unit.nanometers, # nonbonded cutoff
constraints = app.HBonds, # bond constrains
rigidWater = True, # rigid water
removeCMMotion = False, # remove center-of-mass motion
sim_length=100.0 * unit.picoseconds,
timestep=2.0 * unit.femtoseconds, # timestep
temperature=300.0 * unit.kelvin, # simulation temperature
pressure=1.0 * unit.atmospheres, # simulation pressure
collision_rate=20.0 / unit.picoseconds, # Langevin collision rate
barostat_period=50,
minimization_tolerance=10.0 * unit.kilojoules_per_mole / unit.nanometer,
minimization_steps=20,
nsteps_per_iteration=500,
write_solvated_model=False,
cpu_platform_threads=1,
retry_failed_runs=False,
serialize_at_start_of_each_sim=False):
'''Run MD refinement in explicit solvent.
MPI-enabled.
'''
gpuid = mpistate.rank % gpupn
models_dir = os.path.abspath(ensembler.core.default_project_dirnames.models)
targets, templates_resolved_seq = ensembler.core.get_targets_and_templates()
if (sim_length / timestep) < nsteps_per_iteration:
nsteps_per_iteration = int(sim_length / timestep)
niterations = int((sim_length / timestep) / nsteps_per_iteration)
if process_only_these_templates:
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
else:
selected_template_indices = range(len(templates_resolved_seq))
if not openmm_platform:
openmm_platform = auto_select_openmm_platform()
if openmm_platform == 'CPU':
platform_properties = {'CpuThreads': str(cpu_platform_threads)}
else:
platform_properties = {}
ff_files = [ff+'.xml', water_model+'.xml']
forcefield = app.ForceField(*ff_files)
kB = unit.MOLAR_GAS_CONSTANT_R
kT = kB * temperature
def solvate_pdb(pdb, target_nwaters, water_model=water_model):
"""
Solvate the contents of a PDB file, ensuring it has exactly 'target_nwaters' waters.
ARGUMENTS
pdb (simtk.openmm.app.PDBFile) - the PDB file to solvate
        target_nwaters (int) - number of waters to end up with
        OPTIONAL ARGUMENTS
        water_model (string) - solvent model to use (default: 'tip3p')
RETURNS
positions (list of list of simtk.unit.Quantity) - positions of particles
topology (simtk.openmm.app.Topology) - topology object for solvated system
ALGORITHM
        The system is first solvated with zero padding to find the minimal box,
        then re-solvated in a box enlarged by 10% to estimate the bulk water density.
        The box is then scaled to slightly overshoot target_nwaters, and randomly
        chosen excess waters are deleted to hit the target exactly.
TODO
There is no error checking to be sure that waters are not initially present in the system or that initially-present molecules are not deleted.
"""
natoms_per_solvent = 3
# Count initial atoms.
natoms_initial = len(pdb.positions)
if verbose: print("System initially has %d atoms (0 waters)" % (natoms_initial))
# Solvate with zero padding to determine min number of waters and minimal unit cell dimensions.
modeller = app.Modeller(pdb.topology, pdb.positions)
modeller.addSolvent(forcefield, model=water_model, padding=0.0*unit.angstroms)
topology = modeller.getTopology()
positions = modeller.getPositions()
box_min = topology.getUnitCellDimensions()
natoms_min = len(positions) # minimal number of atoms
nwaters_min = (natoms_min - natoms_initial) / natoms_per_solvent # minimal number of waters
volume_min = box_min[0] * box_min[1] * box_min[2]
residues = [ r for r in topology.residues() ] # build a list of residues
nresidues_min = len(residues) # number of residues
if verbose: print("Minimally solvated system has %d atoms (%d waters)" % (natoms_min, nwaters_min))
# Increase the box size by 10% and resolvate.
scale = 1.1
modeller = app.Modeller(pdb.topology, pdb.positions)
topology = modeller.getTopology()
topology.setUnitCellDimensions(box_min * scale)
modeller.addSolvent(forcefield, model=water_model)
positions = modeller.getPositions()
box_enlarged = topology.getUnitCellDimensions()
        natoms_enlarged = len(positions)  # number of atoms in enlarged box
        nwaters_enlarged = (natoms_enlarged - natoms_initial) / natoms_per_solvent  # number of waters in enlarged box
volume_enlarged = box_enlarged[0] * box_enlarged[1] * box_enlarged[2]
density = (nwaters_enlarged - nwaters_min) / (volume_enlarged - volume_min)
if verbose: print("Enlarged solvated system has %d atoms (%d waters) : density of %.3f waters / nm^3" % (natoms_enlarged, nwaters_enlarged, density / (1.0 / unit.nanometer**3)))
# Aim for slightly more waters than target.
over_target = False
extra_nwaters = 100
while not over_target:
            delta_volume = (target_nwaters + extra_nwaters - nwaters_min) / density
            scale = ((volume_min + delta_volume) / volume_min)**(1.0/3.0)
            if verbose: print("Final target of %d waters, so attempting box size %s to achieve %d waters..." % (target_nwaters, str(box_min * scale), target_nwaters + extra_nwaters))
modeller = app.Modeller(pdb.topology, pdb.positions)
topology = modeller.getTopology()
topology.setUnitCellDimensions(box_min * scale)
modeller.addSolvent(forcefield, model=water_model)
positions = modeller.getPositions()
topology = modeller.getTopology()
            natoms = len(positions)  # number of atoms in current trial box
            nwaters = (natoms - natoms_initial) / natoms_per_solvent  # number of waters in current trial box
if verbose: print(" actual %d waters" % nwaters)
if (nwaters > target_nwaters):
over_target = True
else:
extra_nwaters += 100
# Delete waters to achieve target.
ndelete = nwaters - target_nwaters
if (ndelete > 0):
if verbose: print("Will delete %d waters..." % ndelete)
residues = [ r for r in topology.residues() ] # build a list of residues
nresidues = len(residues)
# Select a random subset to delete.
indices = np.random.permutation(range(nresidues_min,nresidues))
residues_to_delete = list()
for index in indices[0:ndelete]:
residues_to_delete.append(residues[index])
modeller.delete(residues_to_delete)
# Get topology and positions.
topology = modeller.getTopology()
positions = modeller.getPositions()
# Count number of waters.
natoms_final = len(positions)
nwaters = (natoms_final - natoms_initial) / 3
if (nwaters != target_nwaters):
raise Exception("Malfunction in solvate_pdb: nwaters = %d, target_nwaters = %d" % (nwaters, target_nwaters))
else:
if write_solvated_model:
# write solvated pdb file
with open(os.path.join(model_dir, 'model-solvated.pdb'), 'w') as pdb_outfile:
app.PDBFile.writeHeader(topology, file=pdb_outfile)
app.PDBFile.writeFile(topology, positions, file=pdb_outfile)
app.PDBFile.writeFooter(topology, file=pdb_outfile)
return [positions, topology]
def simulate_explicit_md():
# Set up Platform
platform = openmm.Platform.getPlatformByName(openmm_platform)
if 'CUDA_VISIBLE_DEVICES' not in os.environ:
# Set GPU id.
if openmm_platform == 'CUDA':
platform.setPropertyDefaultValue('CudaDeviceIndex', '%d' % gpuid)
elif openmm_platform == 'OpenCL':
platform.setPropertyDefaultValue('OpenCLDeviceIndex', '%d' % gpuid)
if verbose: print("Constructing System object...")
        system = forcefield.createSystem(topology, nonbondedMethod=nonbondedMethod, nonbondedCutoff=cutoff, constraints=constraints, rigidWater=rigidWater, removeCMMotion=removeCMMotion)
if verbose: print(" system has %d atoms" % (system.getNumParticles()))
# Add barostat.
if verbose: print("Adding barostat...")
barostat = openmm.MonteCarloBarostat(pressure, temperature, barostat_period)
system.addForce(barostat)
if verbose: print("Creating Context...")
integrator = openmm.LangevinIntegrator(temperature, collision_rate, timestep)
context = openmm.Context(system, integrator, platform, platform_properties)
context.setPositions(positions)
if verbose: print("Minimizing structure...")
openmm.LocalEnergyMinimizer.minimize(context, minimization_tolerance, minimization_steps)
if write_trajectory:
# Open trajectory for writing.
if verbose: print("Opening trajectory for writing...")
trajectory_filename = os.path.join(model_dir, 'explicit-trajectory.pdb.gz')
trajectory_outfile = gzip.open(trajectory_filename, 'w')
            app.PDBFile.writeHeader(topology, file=trajectory_outfile)  # solvated topology, to match the positions written below
# Open energy trajectory for writing
energy_filename = os.path.join(model_dir, 'explicit-energies.txt')
energy_outfile = open(energy_filename, 'w')
energy_outfile.write('# iteration | simulation time (ps) | potential_energy (kT) | kinetic_energy (kT) | volume (nm^3) | ns per day\n')
if verbose: print("Running dynamics...")
context.setVelocitiesToTemperature(temperature)
import time
initial_time = time.time()
if serialize_at_start_of_each_sim:
with open(system_filename[: system_filename.index('.xml')]+'-start.xml', 'w') as system_file:
system_file.write(openmm.XmlSerializer.serialize(system))
with open(integrator_filename[: integrator_filename.index('.xml')]+'-start.xml', 'w') as integrator_file:
integrator_file.write(openmm.XmlSerializer.serialize(integrator))
state = context.getState(getPositions=True, getVelocities=True, getForces=True, getEnergy=True, getParameters=True, enforcePeriodicBox=True)
with open(state_filename[: state_filename.index('.xml')]+'-start.xml', 'w') as state_file:
state_file.write(openmm.XmlSerializer.serialize(state))
for iteration in range(niterations):
# integrate dynamics
integrator.step(nsteps_per_iteration)
# get current state
state = context.getState(getEnergy=True)
simulation_time = state.getTime()
potential_energy = state.getPotentialEnergy()
kinetic_energy = state.getKineticEnergy()
final_time = time.time()
elapsed_time = (final_time - initial_time) * unit.seconds
ns_per_day = (simulation_time / elapsed_time) / (unit.nanoseconds / unit.day)
box_vectors = state.getPeriodicBoxVectors()
volume_in_nm3 = (box_vectors[0][0] * box_vectors[1][1] * box_vectors[2][2]) / (unit.nanometers**3) # TODO: Use full determinant
remaining_time = elapsed_time * (niterations-iteration-1) / (iteration+1)
if verbose: print(" %8.1f ps : potential %8.3f kT | kinetic %8.3f kT | volume %.3f nm^3 | %.3f ns/day | %.3f s remain" % (simulation_time / unit.picoseconds, potential_energy / kT, kinetic_energy / kT, volume_in_nm3, ns_per_day, remaining_time / unit.seconds))
if write_trajectory:
state = context.getState(getPositions=True)
                app.PDBFile.writeModel(topology, state.getPositions(), file=trajectory_outfile, modelIndex=iteration)
# write data
energy_outfile.write(" %8d %8.1f %8.3f %8.3f %.3f %.3f\n" % (iteration, simulation_time / unit.picoseconds, potential_energy / kT, kinetic_energy / kT, volume_in_nm3, ns_per_day))
energy_outfile.flush()
if write_trajectory:
            app.PDBFile.writeFooter(topology, file=trajectory_outfile)
trajectory_outfile.close()
energy_outfile.close()
state = context.getState(getPositions=True, enforcePeriodicBox=True)
try:
with gzip.open(pdb_filename, 'w') as pdb_outfile:
app.PDBFile.writeHeader(topology, file=pdb_outfile)
app.PDBFile.writeFile(topology, state.getPositions(), file=pdb_outfile)
app.PDBFile.writeFooter(topology, file=pdb_outfile)
except:
if os.path.exists(pdb_filename):
os.remove(pdb_filename)
raise
# Serialize system
if verbose: print("Serializing system...")
with gzip.open(system_filename+'.gz', 'w') as system_file:
system_file.write(openmm.XmlSerializer.serialize(system))
# Serialize integrator.
if verbose: print("Serializing integrator...")
with gzip.open(integrator_filename+'.gz', 'w') as integrator_file:
integrator_file.write(openmm.XmlSerializer.serialize(integrator))
# Serialize state.
if verbose: print("Serializing state...")
state = context.getState(getPositions=True, getVelocities=True, getForces=True, getEnergy=True, getParameters=True, enforcePeriodicBox=True)
with gzip.open(state_filename+'.gz', 'w') as state_file:
state_file.write(openmm.XmlSerializer.serialize(state))
for target in targets:
if process_only_these_targets and (target.id not in process_only_these_targets):
continue
models_target_dir = os.path.join(models_dir, target.id)
if mpistate.rank == 0:
target_starttime = datetime.datetime.utcnow()
if not os.path.exists(models_target_dir):
continue
mpistate.comm.Barrier()
# Determine number of waters to use.
nwaters_filename = os.path.join(models_target_dir, 'nwaters-use.txt')
with open(nwaters_filename, 'r') as infile:
line = infile.readline()
nwaters = int(line)
if model_seqid_cutoff:
process_only_these_templates = ensembler.core.select_templates_by_seqid_cutoff(target.id, seqid_cutoff=model_seqid_cutoff)
selected_template_indices = [i for i, seq in enumerate(templates_resolved_seq) if seq.id in process_only_these_templates]
ntemplates_selected = len(selected_template_indices)
for template_index in range(mpistate.rank, ntemplates_selected, mpistate.size):
template = templates_resolved_seq[selected_template_indices[template_index]]
model_dir = os.path.join(models_target_dir, template.id)
if not os.path.exists(model_dir): continue
# Pass if this simulation has already been run.
log_filepath = os.path.join(model_dir, 'explicit-log.yaml')
if os.path.exists(log_filepath):
with open(log_filepath) as log_file:
try:
log_data = yaml.load(log_file, Loader=ensembler.core.YamlLoader)
if log_data.get('successful') is True:
continue
if log_data.get('finished') is True and (retry_failed_runs is False and log_data.get('successful') is False):
continue
except ScannerError as e:
trbk = traceback.format_exc()
warnings.warn(
'= WARNING start: template {0} MPI rank {1} hostname {2} gpuid {3} =\n{4}\n{5}\n= WARNING end: template {0} MPI rank {1} hostname {2} gpuid {3}'.format(
template.id, mpistate.rank, socket.gethostname(), gpuid, e, trbk
)
)
# Check to make sure the initial model file is present.
model_filename = os.path.join(model_dir, 'implicit-refined.pdb.gz')
if not os.path.exists(model_filename):
            if verbose: print('implicit-refined.pdb.gz not present: target %s template %s rank %d gpuid %d' % (target.id, template.id, mpistate.rank, gpuid))
continue
pdb_filename = os.path.join(model_dir, 'explicit-refined.pdb.gz')
system_filename = os.path.join(model_dir, 'explicit-system.xml')
integrator_filename = os.path.join(model_dir, 'explicit-integrator.xml')
state_filename = os.path.join(model_dir, 'explicit-state.xml')
print("-------------------------------------------------------------------------")
print("Simulating %s => %s in explicit solvent for %.1f ps" % (target.id, template.id, niterations * nsteps_per_iteration * timestep / unit.picoseconds))
print("-------------------------------------------------------------------------")
# Open log file
log_data = {
'mpi_rank': mpistate.rank,
'gpuid': gpuid if 'CUDA_VISIBLE_DEVICES' not in os.environ else os.environ['CUDA_VISIBLE_DEVICES'],
'openmm_platform': openmm_platform,
'sim_length': '%s' % sim_length,
'finished': False,
}
log_file = ensembler.core.LogFile(log_filepath)
log_file.log(new_log_data=log_data)
try:
start = datetime.datetime.utcnow()
with gzip.open(model_filename) as model_file:
pdb = app.PDBFile(model_file)
if not include_disulfide_bonds:
remove_disulfide_bonds_from_topology(pdb.topology)
[positions, topology] = solvate_pdb(pdb, nwaters)
simulate_explicit_md()
timing = ensembler.core.strf_timedelta(datetime.datetime.utcnow() - start)
log_data = {
'finished': True,
'timing': timing,
'successful': True,
}
log_file.log(new_log_data=log_data)
except Exception as e:
trbk = traceback.format_exc()
warnings.warn(
'= ERROR start: template {0} MPI rank {1} hostname {2} gpuid {3} =\n{4}\n{5}\n= ERROR end: template {0} MPI rank {1} hostname {2} gpuid {3}'.format(
template.id, mpistate.rank, socket.gethostname(), gpuid, e, trbk
)
)
timing = ensembler.core.strf_timedelta(datetime.datetime.utcnow() - start)
log_data = {
'exception': e,
'traceback': ensembler.core.literal_str(trbk),
'timing': timing,
'finished': True,
'successful': False,
}
log_file.log(new_log_data=log_data)
if verbose:
print('Finished template loop: rank %d' % mpistate.rank)
mpistate.comm.Barrier()
if mpistate.rank == 0:
project_metadata = ensembler.core.ProjectMetadata(project_stage='refine_explicit_md', target_id=target.id)
datestamp = ensembler.core.get_utcnow_formatted()
nsuccessful_refinements = subprocess.check_output(['find', models_target_dir, '-name', 'explicit-refined.pdb.gz']).count('\n')
target_timedelta = datetime.datetime.utcnow() - target_starttime
metadata = {
'target_id': target.id,
'datestamp': datestamp,
'model_seqid_cutoff': model_seqid_cutoff,
'process_only_these_targets': process_only_these_targets,
'process_only_these_templates': process_only_these_templates,
'timing': ensembler.core.strf_timedelta(target_timedelta),
'ff': ff,
'water_model': water_model,
'nsuccessful_refinements': nsuccessful_refinements,
'python_version': sys.version.split('|')[0].strip(),
'python_full_version': ensembler.core.literal_str(sys.version),
'ensembler_version': ensembler.version.short_version,
'ensembler_commit': ensembler.version.git_revision,
'biopython_version': Bio.__version__,
'openmm_version': simtk.openmm.version.short_version,
'openmm_commit': simtk.openmm.version.git_revision
}
project_metadata.add_data(metadata)
project_metadata.write()
mpistate.comm.Barrier()
mpistate.comm.Barrier()
if mpistate.rank == 0:
print('Done.')
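# The box-sizing heuristic inside solvate_pdb estimates the bulk water density
# from two trial solvations and then scales the unit cell isotropically so the
# next solvation slightly overshoots the target water count. A standalone
# sketch of that arithmetic (hypothetical helper, not part of ensembler's API;
# volumes in consistent units, e.g. nm^3):
def _box_scale_for_target_nwaters(nwaters_min, volume_min,
                                  nwaters_enlarged, volume_enlarged,
                                  target_nwaters, extra_nwaters=100):
    # waters gained per unit volume between the two trial solvations
    density = (nwaters_enlarged - nwaters_min) / (volume_enlarged - volume_min)
    # extra volume needed to hold the target plus a small safety margin
    delta_volume = (target_nwaters + extra_nwaters - nwaters_min) / density
    # isotropic scaling: volume grows with the cube of the edge length
    return ((volume_min + delta_volume) / volume_min) ** (1.0 / 3.0)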
def readFileContents(filename):
    """Return the contents of `filename`, falling back transparently to a
    gzipped `filename + '.gz'` if the plain file does not exist."""
    import gzip
    import os.path
    if os.path.exists(filename):
        infile = open(filename, 'r')
    elif os.path.exists(filename + '.gz'):
        infile = gzip.open(filename + '.gz', 'r')
    else:
        raise IOError('File %s not found' % filename)
    with infile:
        return infile.read()
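# Hedged usage example (hypothetical path): reads the plain file if present,
# otherwise its gzipped sibling on disk:
#
#     >>> pdbtext = readFileContents('model.pdb')   # falls back to model.pdb.gz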
| danielparton/ensembler | ensembler/refinement.py | Python | gpl-2.0 | 52,535 |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import distiller
from .distiller import *
from . import distillation_strategy
from .distillation_strategy import *
__all__ = distiller.__all__
__all__ += distillation_strategy.__all__
| chengduoZH/Paddle | python/paddle/fluid/contrib/slim/distillation/__init__.py | Python | apache-2.0 | 802 |
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class HydraPipeline(object):
def process_item(self, item, spider):
return item
| Raithalus/Project-Hydra | Hydra/Hydra/pipelines.py | Python | mit | 285 |
# (c) Crown Owned Copyright, 2016. Dstl.
"""lighthouse URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from apps.links.views import (
LinkCreate,
LinkDetail,
LinkInterstitial,
LinkList,
LinkStats,
LinkStatsCSV,
LinkUpdate,
LinkRedirect,
OverallLinkStats,
OverallLinkStatsCSV,
)
from apps.organisations.views import (
OrganisationCreate,
OrganisationDetail,
OrganisationList,
)
from apps.teams.views import (
TeamCreate,
TeamDetail,
TeamList,
TeamJoin,
TeamLeave,
)
from apps.home.views import Home
from apps.staticpages.views import StaticPageView, Status404View, Status500View
from apps.search.views import (
SearchStats,
SearchStatsCSV
)
from apps.accounts.views import (
LoginView,
LogoutView,
UserDetail,
UserList,
UserUpdateProfile,
UserUpdateProfileTeams,
UserFavouritesAdd,
UserFavouritesRemove,
)
handler404 = Status404View.as_view()
handler500 = Status500View.as_view()
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(
r'^login$',
LoginView.as_view(),
name='login',
),
url(
r'^logout$',
LogoutView.as_view(),
name='logout',
),
url(r'^$', Home.as_view(), name="home"),
url(
r'^users/?$',
UserList.as_view(),
name='user-list',
),
url(
r'^users/(?P<slug>[\w-]+)/update-profile/teams/?$',
UserUpdateProfileTeams.as_view(),
name='user-update-teams',
),
url(
r'^users/(?P<slug>[\w-]+)/update-profile/?$',
UserUpdateProfile.as_view(),
name='user-updateprofile',
),
url(
r'^users/(?P<slug>[\w-]+)/?$',
UserDetail.as_view(),
name='user-detail',
),
url(
r'^users/(?P<slug>[\w-]+)/favourites/add/?$',
UserFavouritesAdd.as_view(),
name='user-favourites-add',
),
url(
r'^users/(?P<slug>[\w-]+)/favourites/remove/?$',
UserFavouritesRemove.as_view(),
name='user-favourites-remove',
),
url(
r'^links/?$',
LinkList.as_view(),
name='link-list',
),
url(
r'^links/(?P<pk>\d+)/?$',
LinkDetail.as_view(),
name='link-detail',
),
url(
r'^links/(?P<pk>\d+)/stats$',
LinkStats.as_view(),
name='link-stats',
),
url(
r'^links/(?P<pk>\d+)/stats.csv$',
LinkStatsCSV.as_view(),
name='link-stats-csv',
),
url(
r'^links/(?P<pk>\d+)/redirect/?$',
LinkRedirect.as_view(),
name='link-redirect',
),
url(
r'^links/(?P<pk>\d+)/go/?$',
LinkInterstitial.as_view(),
name='link-interstitial',
),
url(
r'^links/(?P<pk>\d+)/edit/?$',
LinkUpdate.as_view(),
name='link-edit',
),
url(
r'^links/new/?$',
LinkCreate.as_view(),
name='link-create',
),
url(
r'^links/stats$',
OverallLinkStats.as_view(),
name='link-overall-stats',
),
url(
r'^links/stats.csv$',
OverallLinkStatsCSV.as_view(),
name='link-overall-stats-csv',
),
url(
r'^organisations/?$',
OrganisationList.as_view(),
name='organisation-list',
),
url(
r'^organisations/new/?$',
OrganisationCreate.as_view(),
name='organisation-create',
),
url(
r'^organisations/(?P<pk>\d+)/?$',
OrganisationDetail.as_view(),
name='organisation-detail',
),
url(
r'^organisations/(?P<org_id>\d+)/teams/new/?$',
TeamCreate.as_view(),
name='organisation-team-create',
),
url(
r'^teams/?$',
TeamList.as_view(),
name='team-list',
),
url(
r'^teams/new/?$',
TeamCreate.as_view(),
name='team-create',
),
    url(
        r'^teams/(?P<pk>\d+)/join/?$',
        TeamJoin.as_view(),
        name='team-join',
    ),
    url(
        r'^teams/(?P<pk>\d+)/leave/?$',
        TeamLeave.as_view(),
        name='team-leave',
    ),
url(
r'^teams/(?P<pk>\d+)/?$',
TeamDetail.as_view(),
name='team-detail',
),
url(
r'^api/',
include('apps.api.urls'),
),
url(
r'^search/stats/?$',
SearchStats.as_view(),
name='search-stats',
),
url(
r'^search/stats.csv$',
SearchStatsCSV.as_view(),
name='search-stats-csv',
),
url(
r'^(?P<slug>\w+)/?$',
StaticPageView.as_view(),
name='static-page'
),
]
urlpatterns += staticfiles_urlpatterns()
| dstl/lighthouse | lighthouse/urls.py | Python | mit | 5,370 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
import shutil
import zipfile
from urllib2 import urlopen
from setuptools import setup
from cStringIO import StringIO
BASE_URL = "https://github.com/cloudhead/less.js"
DEFAULT_VERSION = os.getenv('LESS_VERSION', '1.6.2')
PROJECT_DIR = os.environ.get('PROJECT_DIR')
def get_version():
if not PROJECT_DIR:
return DEFAULT_VERSION
package_file = os.path.join(PROJECT_DIR, 'package.json')
try:
package_json = json.load(open(package_file))
except (IOError, ValueError):
print "cannot find custom node version in package.json, using default"
else:
version = package_json.get('dependencies', {}).get('less', '')
if version.startswith('=='):
return version.replace('==', '')
return DEFAULT_VERSION
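# Note: only pip-style '==x.y.z' pins in package.json's "dependencies" are
# honored by get_version(); any other specifier falls back to DEFAULT_VERSION.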
less_zip = urlopen("%s/archive/v%s.zip" % (BASE_URL, get_version()))
less_dir = zipfile.ZipFile(StringIO(less_zip.read()))
for entry in less_dir.namelist():
root_dir, __ = entry.split('/', 1)
break
less_dir.extractall()
scripts = []
data_files = []
lib_dir = os.path.join(root_dir, 'lib')
bin_dir = os.path.join(root_dir, 'bin')
for info in less_dir.infolist():
if info.filename.startswith(lib_dir) and info.filename.endswith('.js'):
path = '/'.join(info.filename.split('/')[1:-1])
data_files.append((path, [info.filename]))
elif info.filename.startswith(bin_dir) and os.path.isfile(info.filename):
scripts.append(info.filename)
setup(
name='virtual-less',
version='0.0.2',
description='Install lessc into your virtualenv',
author='Sebastian Vetter',
author_email='[email protected]',
url='http://github.com/elbaschid/virtual-less',
long_description="%s\n\n%s" % (open('README.rst').read(),
open('CHANGELOG.rst').read()),
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Programming Language :: JavaScript',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'virtual-node>=0.0.3',
],
license='BSD',
scripts=scripts,
data_files=data_files,
)
# remove extracted files
shutil.rmtree(root_dir)
| elbaschid/virtual-less | setup.py | Python | bsd-3-clause | 2,441 |
import json
import tarfile
from .exceptions import DockerError, DockerContainerError
from .jsonstream import json_stream_result
from .multiplexed import multiplexed_result
from .utils import identical, parse_result
from .logs import DockerLog
class DockerContainers(object):
def __init__(self, docker):
self.docker = docker
async def list(self, **kwargs):
data = await self.docker._query_json(
"containers/json", method="GET", params=kwargs
)
return [DockerContainer(self.docker, **x) for x in data]
async def create_or_replace(self, name, config):
container = None
try:
container = await self.get(name)
if not identical(config, container._container):
running = container._container.get("State", {}).get("Running", False)
if running:
await container.stop()
await container.delete()
container = None
except DockerError:
pass
if container is None:
container = await self.create(config, name=name)
return container
async def create(self, config, *, name=None):
url = "containers/create"
config = json.dumps(config, sort_keys=True).encode("utf-8")
kwargs = {}
if name:
kwargs["name"] = name
data = await self.docker._query_json(
url, method="POST", data=config, params=kwargs
)
return DockerContainer(self.docker, id=data["Id"])
async def run(self, config, *, name=None):
"""
Create and start a container.
        If container.start() raises an error, the exception will contain
        a `container_id` attribute with the id of the container.
"""
try:
container = await self.create(config, name=name)
except DockerError as err:
            # image not found, try pulling it
if err.status == 404 and "Image" in config:
await self.docker.pull(config["Image"])
container = await self.create(config, name=name)
else:
raise err
try:
await container.start()
except DockerError as err:
raise DockerContainerError(
err.status, {"message": err.message}, container["id"]
)
return container
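    # Hedged usage sketch (not part of the original file): recovering the id
    # of a container whose start() failed, via the attribute noted above:
    #
    #     try:
    #         container = await docker.containers.run({"Image": "busybox"})
    #     except DockerContainerError as err:
    #         await docker.containers.container(err.container_id).delete()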
async def get(self, container, **kwargs):
data = await self.docker._query_json(
"containers/{container}/json".format(container=container),
method="GET",
params=kwargs,
)
return DockerContainer(self.docker, **data)
def container(self, container_id, **kwargs):
data = {"id": container_id}
data.update(kwargs)
return DockerContainer(self.docker, **data)
class DockerContainer:
def __init__(self, docker, **kwargs):
self.docker = docker
self._container = kwargs
self._id = self._container.get(
"id", self._container.get("ID", self._container.get("Id"))
)
self.logs = DockerLog(docker, self)
async def log(self, *, stdout=False, stderr=False, follow=False, **kwargs):
if stdout is False and stderr is False:
raise TypeError("Need one of stdout or stderr")
params = {"stdout": stdout, "stderr": stderr, "follow": follow}
params.update(kwargs)
inspect_info = await self.show()
is_tty = inspect_info["Config"]["Tty"]
response = await self.docker._query(
"containers/{self._id}/logs".format(self=self), method="GET", params=params
)
return await multiplexed_result(response, follow, is_tty=is_tty)
async def copy(self, resource, **kwargs):
# TODO: this is deprecated, use get_archive instead
request = json.dumps({"Resource": resource}, sort_keys=True).encode("utf-8")
data = await self.docker._query(
"containers/{self._id}/copy".format(self=self),
method="POST",
data=request,
headers={"content-type": "application/json"},
params=kwargs,
)
return data
async def get_archive(self, path: str) -> tarfile.TarFile:
response = await self.docker._query(
"containers/{self._id}/archive".format(self=self),
method="GET",
params={"path": path},
)
data = await parse_result(response)
return data
async def put_archive(self, path, data):
response = await self.docker._query(
"containers/{self._id}/archive".format(self=self),
method="PUT",
data=data,
headers={"content-type": "application/json"},
params={"path": path},
)
data = await parse_result(response)
return data
async def show(self, **kwargs):
data = await self.docker._query_json(
"containers/{self._id}/json".format(self=self), method="GET", params=kwargs
)
self._container = data
return data
async def stop(self, **kwargs):
response = await self.docker._query(
"containers/{self._id}/stop".format(self=self), method="POST", params=kwargs
)
await response.release()
return
async def start(self, **kwargs):
response = await self.docker._query(
"containers/{self._id}/start".format(self=self),
method="POST",
headers={"content-type": "application/json"},
data=kwargs,
)
await response.release()
return
async def restart(self, timeout=None):
params = {}
if timeout is not None:
params["t"] = timeout
response = await self.docker._query(
"containers/{self._id}/restart".format(self=self),
method="POST",
params=params,
)
await response.release()
return
async def kill(self, **kwargs):
response = await self.docker._query(
"containers/{self._id}/kill".format(self=self), method="POST", params=kwargs
)
await response.release()
return
async def wait(self, *, timeout=None, **kwargs):
data = await self.docker._query_json(
"containers/{self._id}/wait".format(self=self),
method="POST",
params=kwargs,
timeout=timeout,
)
return data
async def delete(self, **kwargs):
response = await self.docker._query(
"containers/{self._id}".format(self=self), method="DELETE", params=kwargs
)
await response.release()
async def websocket(self, **params):
path = "containers/{self._id}/attach/ws".format(self=self)
ws = await self.docker._websocket(path, **params)
return ws
async def port(self, private_port):
if "NetworkSettings" not in self._container:
await self.show()
private_port = str(private_port)
h_ports = None
# Port settings is None when the container is running with
# network_mode=host.
port_settings = self._container.get("NetworkSettings", {}).get("Ports")
if port_settings is None:
return None
if "/" in private_port:
return port_settings.get(private_port)
h_ports = port_settings.get(private_port + "/tcp")
if h_ports is None:
h_ports = port_settings.get(private_port + "/udp")
return h_ports
async def stats(self, *, stream=True):
if stream:
response = await self.docker._query(
"containers/{self._id}/stats".format(self=self), params={"stream": "1"}
)
return await json_stream_result(response)
else:
data = await self.docker._query_json(
"containers/{self._id}/stats".format(self=self), params={"stream": "0"}
)
return data
def __getitem__(self, key):
return self._container[key]
    def __contains__(self, key):
        return key in self._container
| paultag/aiodocker | aiodocker/containers.py | Python | mit | 8,186 |
import socket
import struct
import logging
class FitnessQuerier:
def __init__(self, config_values):
self._server_address = config_values['fitness_service_addr']
self._server_port = config_values['fitness_service_port']
self._query_type = config_values['fitness_type']
self._fitness_weights = config_values['fitness_weights']
self._id = config_values['robot_id']
self._ipaddr = ''
def start(self):
try:
s = self._create_socket()
resp = self._send_message(s, 'start')
s.close()
except (socket.error, socket.gaierror):
resp = -1
return resp
def get_fitness(self):
fitness = {}
for method, weight in zip(self._query_type, self._fitness_weights):
try:
s = self._create_socket()
fitness[method] = weight * self._send_message(s, 'fitness', method)[0][0]
s.close()
except (socket.error, socket.gaierror):
pass
        return sum(fitness.values())
def get_position(self):
try:
s = self._create_socket()
position = self._send_message(s, 'position')
s.close()
except (socket.error, socket.gaierror):
return ()
return position
def _create_socket(self):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error as err:
logging.error("Failed to create socket: {0}".format(err))
raise
try:
self._ipaddr = socket.gethostbyname(self._server_address)
s.connect((self._ipaddr, self._server_port))
except socket.gaierror:
logging.error("Cannot connect to host: {}".format(self._server_address))
raise
return s
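    # Wire format used by _send_message below (derived from its struct calls;
    # all fields are big-endian, '!'):
    #   start:    send '!ll'  (qt=1, robot_id)          recv '!l' error
    #   fitness:  send '!lll' (qt=2, robot_id, method)  recv '!f' value, '!l' error
    #   position: send '!ll'  (qt=3, robot_id)          recv '!f' x, '!f' y, '!l' error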
def _send_message(self, sock, query_type, method=''):
if query_type == 'start':
qt = 1
message = struct.pack('!ll', qt, self._id)
elif query_type == 'fitness':
if method == '':
raise ValueError("Fitness evaluation method not specified")
qt = 2
if method == 'displacement':
met = 1
elif method == 'path':
met = 2
else:
raise NameError("Unknown fitness evaluation method: {}".format(method))
message = struct.pack('!lll', qt, self._id, met)
elif query_type == 'position':
qt = 3
message = struct.pack('!ll', qt, self._id)
else:
raise NameError("Unknown query type: {}".format(query_type))
try:
sock.sendall(message)
except socket.error:
logging.error("Couldn't send query")
if query_type == 'fitness':
response = sock.recv(4)
error = sock.recv(4)
return struct.unpack('!f', response), struct.unpack('!l', error)
elif query_type == 'start':
error = sock.recv(4)
return struct.unpack('!l', error)
elif query_type == 'position':
response1 = sock.recv(4)
response2 = sock.recv(4)
error = sock.recv(4)
return struct.unpack('!f', response1), struct.unpack('!f', response2), struct.unpack('!l', error)
| portaloffreedom/robot-baby | RobotController/hal/inputs/fitness_querier.py | Python | apache-2.0 | 3,383 |
from django.core.paginator import EmptyPage
from rest_framework import pagination
from rest_framework.response import Response
class StandardPagination(pagination.PageNumberPagination):
page_size = 10
page_size_query_param = 'page_size'
max_page_size = 100
def paginate_queryset(self, queryset, request, view=None):
if request.query_params.get('get_all', '').lower() == 'true':
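            # Returning None tells DRF to skip pagination entirely, so
            # '?get_all=true' yields the full, unpaginated queryset.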
return None
return super().paginate_queryset(queryset, request, view=view)
def get_paginated_response_schema(self, schema):
r = {
'type': 'object',
'properties': {
'pagination': {
'type': 'object',
'properties': {
'links': {
'type': 'object',
'properties': {
'next': {
'type': 'string',
'nullable': True,
'format': 'uri',
'example': 'http://api.example.org/accounts/?{page_query_param}=4'.format(
page_query_param=self.page_query_param)
},
'previous': {
'type': 'string',
'nullable': True,
'format': 'uri',
'example': 'http://api.example.org/accounts/?{page_query_param}=2'.format(
page_query_param=self.page_query_param)
},
}
},
'previous_page': {
'type': 'integer',
'example': 123,
},
'next_page': {
'type': 'integer',
'example': 123,
},
'start_index': {
'type': 'integer',
'example': 123,
},
'end_index': {
'type': 'integer',
'example': 123,
},
'total_entries': {
'type': 'integer',
'example': 123,
},
'total_pages': {
'type': 'integer',
'example': 123,
},
'page': {
'type': 'integer',
'example': 123,
},
}
},
'results': schema,
},
}
return r
def get_paginated_response(self, data):
try:
previous_page_number = self.page.previous_page_number()
except EmptyPage:
previous_page_number = None
try:
next_page_number = self.page.next_page_number()
except EmptyPage:
next_page_number = None
return Response({
'pagination': {
'links': {
'next': self.get_next_link(),
'previous': self.get_previous_link(),
},
'previous_page': previous_page_number,
'next_page': next_page_number,
'start_index': self.page.start_index(),
'end_index': self.page.end_index(),
'total_entries': self.page.paginator.count,
'total_pages': self.page.paginator.num_pages,
'page': self.page.number,
},
'results': data,
})
| interlegis/sapl | sapl/api/pagination.py | Python | gpl-3.0 | 3,967 |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# streamondemand.- XBMC Plugin
# Channel for megafiletube.xyz
# http://blog.tvalacarta.info/plugin-xbmc/streamondemand.
# By Costaplus
# ------------------------------------------------------------
import re
import xbmc
import filmontv
from core import config
from core import logger
from core import scrapertools
from core.item import Item
__channel__ = "megafiletube"
__category__ = "F"
__type__ = "generic"
__title__ = "megafiletube.xyz"
__language__ = "IT"
DEBUG = config.get_setting("debug")
host = "http://megafiletube.xyz"
film = host + "/browse.php?search=&cat=1&t_lang=4"
def isGeneric():
return True
#-----------------------------------------------------------------
def mainlist(item):
log("mainlist","mainlist")
itemlist =[]
itemlist.append(Item(channel=__channel__,action="elenco_film",title="[COLOR azure]Novità Film[/COLOR]" ,url=film ,thumbnail=thumbnovita,fanart=fanart))
itemlist.append(Item(channel=__channel__, title="[COLOR yellow]Cerca...[/COLOR]", action="search",thumbnail=thumbcerca,fanart=fanart))
return itemlist
#=================================================================
#-----------------------------------------------------------------
def elenco_film(item):
log("elenco_film", "elenco_film")
itemlist=[]
patron="img.*?src=.'(.*?)'.*?href=\"(.*?)\"[^>]+>(.*?)</a>"
for scrapedimg,scrapedurl,scrapedtitolo in scrapedAll(item.url,patron):
scrapedimg = scrapedimg.replace('\\','')
base=scrapedtitolo.replace(".","")
base=base.replace("(","")
titolo=base.split("20")[0]
itemlist.append(Item(channel=__channel__, action="dettaglio_film", title="[COLOR darkkhaki].torrent [/COLOR]""[COLOR azure]"+titolo+"[/COLOR]",fulltitle=scrapedtitolo, url=host+scrapedurl,thumbnail=scrapedimg, fanart=scrapedimg))
    # Pagination
# ===========================================================
pagina = scrapedAll(item.url, '<td class="highlight">.*?class="pager"><a.*?href="(.*?)"')
if len(pagina) > 0:
pagina=scrapertools.decodeHtmlentities(pagina[0])
log("megafiletube", "Pagina url: " + pagina)
itemlist.append(Item(channel=__channel__, action="elenco_film", title=AvantiTxt, url=pagina,thumbnail=AvantiImg, folder=True))
itemlist.append(Item(channel=__channel__, action="HomePage", title=HomeTxt, folder=True))
return itemlist
#=================================================================
#-----------------------------------------------------------------
def dettaglio_film(item):
log("dettaglio_film", "dettaglio_film")
itemlist=[]
#patronMagnet = "red3'>.*?<div class='icon3'>.*?href=\"(.*?)\".*?class='fa fa-magnet"
patronMagnet = '<div class=\'icon3\'> <a href="(magnet[^&]+)[^>]+>'
    patronMagnet = patronMagnet.replace("&amp;", "&")
titolo=scrapedAll(item.url, patronMagnet)
patronTorrent = "<div class='icon3'>.*?href=\"(.*?)\".*?class='fa fa-download"
torrent =scrapedAll(item.url,patronTorrent)
patronTriler='<embed.*?src=\'(.*?)\''
Triler = scrapedAll(item.url,patronTriler)
xbmc.log("titolo " + titolo[0] + "torrent " + torrent[0] + " " + Triler[0] )
itemlist.append(Item(channel=__channel__, action="torrent", title="[COLOR yellow] Torrent [/COLOR] - [COLOR azure]Download[/COLOR] [I](" + host+torrent[0]+ ")[/I]",url=host+torrent[0], folder=True))
itemlist.append(Item(channel=__channel__, action="torrent",server="torrent", title="[COLOR yellow] Magnet [/COLOR] - [COLOR azure]Streaming[/COLOR] [I](" + titolo[0] + ")[/I]",url=titolo[0], folder=True))
itemlist.append(Item(channel=__channel__, action="findvideos", title="[COLOR yellow]Trailer [/COLOR]", url=item.url,folder=True))
itemlist.append(Item(channel=__channel__, action="cerca", title="[COLOR orange]Cerca in tutti i canali [/COLOR] "+ item.title, folder=True))
itemlist.append(Item(channel=__channel__,action="",title="[COLOR azure]Info Qualità:[/COLOR] [I]"+ item.fulltitle + "[/I]",folder=False))
return itemlist
#=================================================================
#-----------------------------------------------------------------
def search(item, texto):
    log("search", "search " + texto)
itemlist = []
item.url = host+"/browse.php?search=" + texto +"&cat=1&t_lang=4"
return elenco_film(item)
#=================================================================
#-----------------------------------------------------------------
def cerca(item):
itemlist=[]
    item.title = item.title.replace("[COLOR orange]Cerca in tutti i canali [/COLOR] ", "")
xbmc.log("titolo:"+item.title)
itemlist=filmontv.do_search(item)
if len(itemlist)==0:
itemlist.append(Item(channel=__channel__,action="mainlist",title="[COLOR red]Nessun canale dispone di questo titolo![/COLOR]"))
return itemlist
#=================================================================
#-----------------------------------------------------------------
def torrent(item):
logger.info("[corsaronero.py] play")
itemlist = []
itemlist.append( Item(channel=__channel__, action="play", server="torrent", title=item.title , url=item.url , thumbnail=item.thumbnail , plot=item.plot , folder=False) )
return itemlist
#=================================================================
#=================================================================
# Utility functions
#-----------------------------------------------------------------
def scrapedAll(url="",patron=""):
matches = []
data = scrapertools.cache_page(url)
if DEBUG: logger.info("data:"+data)
MyPatron = patron
matches = re.compile(MyPatron, re.DOTALL).findall(data)
scrapertools.printMatches(matches)
return matches
#=================================================================
#-----------------------------------------------------------------
def log(funzione="",stringa="",canale=__channel__):
if DEBUG:logger.info("[" + canale + "].[" + funzione + "] " + stringa)
#=================================================================
#-----------------------------------------------------------------
def HomePage(item):
xbmc.executebuiltin("ReplaceWindow(10024,plugin://plugin.video.streamondemand)")
#=================================================================
thumbnovita="http://orig03.deviantart.net/6889/f/2014/079/7/b/movies_and_popcorn_folder_icon_by_matheusgrilo-d7ay4tw.png"
thumbcerca="http://dc467.4shared.com/img/fEbJqOum/s7/13feaf0c8c0/Search"
fanart="http://www.virgilioweb.it/wp-content/uploads/2015/06/film-streaming.jpg"
HomeTxt = "[COLOR yellow]Torna Home[/COLOR]"
AvantiTxt="[COLOR orange]Successivo>>[/COLOR]"
AvantiImg="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png"
| dentaku65/plugin.video.sod | channels/megafiletube.py | Python | gpl-3.0 | 7,034 |
'''
This script is run from the command line as:
python visualize.py --testcase testcase.json --tcp [fast|vegas|reno]
Raw measurements are dumped to /results/all_measurements.txt
Parsed measurements and plots are stored in /Code/Python/results/[rawdata,plots]
These directories are cleared at the start of each run.
Currently, supported plots include:
- link rate (mpbs)
- buffer occupancy (%)
- packet loss (packets)
- flow rate (Mbps)
- flow window size (packets)
- packet round trip time (ms)
Plenty more measurements are made, so check out the actual data dumps.
Any plot for which data is reported is plotted.
Time/Bin Averages are used when they improve understanding, but not when
they hide the inner workings of the network. In many cases events are
plotted directly.
Last Revised by Sushant Sundaresh on 6 Dec 2015
References:
http://matplotlib.org/examples/pylab_examples/simple_plot.html
http://stackoverflow.com/questions/4675728/redirect-stdout-to-a-file-in-python
http://stackoverflow.com/questions/14245227/python-reset-stdout-to-normal-after-previously-redirecting-it-to-a-file
http://stackoverflow.com/questions/273192/in-python-check-if-a-directory-exists-and-create-it-if-necessary
'''
import constants
import sys, os
import json
import matplotlib.pyplot as plt
import numpy as np
from main import MainLoop
from link import Link
from flow import Flow, Data_Source
from tcp_reno_working import Working_Data_Source_TCP_RENO, Working_Data_Sink_TCP_RENO
from tcp_fast_working import Working_Data_Source_TCP_FAST
from tcp_vegas_working import Working_Data_Source_TCP_VEGAS
def handle_linkrate (datamap, datalog):
if datalog["measurement"] == "linkrate":
if not (datalog["linkid"] in datamap.keys()):
datamap[datalog["linkid"]] = {}
if not (datalog["measurement"] in datamap[datalog["linkid"]].keys()):
datamap[datalog["linkid"]][datalog["measurement"]] = []
datamap[datalog["linkid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
float(datalog["mbits_propagated"])\
]\
)
def handle_flowrate (datamap, datalog):
if datalog["measurement"] == "flowrate":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
float(datalog["mbits_received_at_sink"])\
]\
)
def handle_packet_loss (datamap, datalog):
if datalog["measurement"] == "packetloss":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
# exactly one packet loss is reported each time
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
float(1.0)\
]\
)
# Data is parsed into triply nested dict with key-levels at link-id,
# measurement type, and link buffer direction. The final values
# are just [time (ms), buffer fractional occupancy (0-1)]
def handle_buffer_occupancy (datamap, datalog):
if datalog["measurement"] == "bufferoccupancy":
if not (datalog["linkid"] in datamap.keys()):
datamap[datalog["linkid"]] = {}
if not (datalog["measurement"] in datamap[datalog["linkid"]].keys()):
datamap[datalog["linkid"]][datalog["measurement"]] = {}
if not (datalog["direction"] in datamap[datalog["linkid"]][datalog["measurement"]].keys()):
datamap[datalog["linkid"]][datalog["measurement"]][datalog["direction"]] = []
datamap[datalog["linkid"]][datalog["measurement"]][datalog["direction"]].append(\
[ float(datalog["ms_globaltime"]), \
float(datalog["fractional_buffer_occupancy"])\
]\
)
def handle_flow_window (datamap, datalog):
if datalog["measurement"] == "windowsize":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
float(datalog["windowsize"])\
]\
)
def handle_flow_state (datamap, datalog):
if datalog["measurement"] == "flowstate":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
float(datalog["state"])\
]\
)
def handle_packets_outstanding (datamap, datalog):
if datalog["measurement"] == "outstandingpackets":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
float(datalog["packets_out"]), \
float(datalog["packets_left"]),\
float(datalog["packets_in_transit"]),\
float(datalog["packets_ackd"]),\
float(datalog["total_packets"]),\
]\
)
def handle_flow_reno_debug (datamap, datalog):
if datalog["measurement"] == "fullrenodebug":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
datalog["SendReceive"],\
int(datalog["whichPacket"]),\
int(datalog["EPIT"]),\
int(datalog["LPIA"]),\
float(datalog["WS"]),\
float(datalog["CAT"]),\
float(datalog["STT"]),\
int(datalog["L3P0"]),\
int(datalog["L3P1"]),\
int(datalog["L3P2"]),\
datalog["TAF"],\
datalog["DAF"],\
datalog["SAF"],\
int(datalog["State"]),\
datalog["isTimeoutOccurring"],\
float(datalog["RTTactEst"]) ])
def handle_flow_vegas_debug (datamap, datalog):
if datalog["measurement"] == "fullvegasdebug":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
datalog["SendReceive"],\
int(datalog["whichPacket"]),\
int(datalog["EPIT"]),\
int(datalog["LPIA"]),\
float(datalog["WS"]),\
float(datalog["STT"]),\
int(datalog["L3P0"]),\
int(datalog["L3P1"]),\
int(datalog["L3P2"]),\
datalog["TAF"],\
datalog["DAF"],\
datalog["SAF"],\
int(datalog["State"]),\
datalog["FlagObserveRTT"],\
datalog["FlagRampWS"],\
datalog["isTimeoutOccurring"],\
float(datalog["RTTmin"]),\
float(datalog["RTTactEst"]),\
int(datalog["ICAPTUW"]) ])
def handle_flow_true_fast_debug (datamap, datalog):
if datalog["measurement"] == "fulltruefastdebug":
if not (datalog["flowid"] in datamap.keys()):
datamap[datalog["flowid"]] = {}
if not (datalog["measurement"] in datamap[datalog["flowid"]].keys()):
datamap[datalog["flowid"]][datalog["measurement"]] = []
datamap[datalog["flowid"]][datalog["measurement"]].append(\
[ float(datalog["ms_globaltime"]), \
datalog["SendReceive"],\
int(datalog["whichPacket"]),\
int(datalog["EPIT"]),\
int(datalog["LPIA"]),\
float(datalog["WS"]),\
float(datalog["STT"]),\
int(datalog["L3P0"]),\
int(datalog["L3P1"]),\
int(datalog["L3P2"]),\
datalog["TAF"],\
datalog["DAF"],\
datalog["SAF"],\
datalog["isTimeoutOccurring"],\
float(datalog["RTTmin"]),\
float(datalog["RTTmax"]),\
float(datalog["RTTactEst"]) ])
# Breaks time into ms_window chunks and sums values within bins
def windowed_sum(times, values, ms_window):
windowed_time = []
windowed_values = []
final_base_time = 0.0
update_bin_flag = True
k = 0
while k < len(times):
if update_bin_flag is True:
current_base_time = final_base_time
final_base_time = current_base_time + ms_window
current_bin_time = final_base_time - float(ms_window)/2
current_value_sum = 0.0
update_bin_flag = False
if times[k] <= final_base_time:
current_value_sum += values[k]
k += 1
else:
windowed_time.append(current_bin_time)
windowed_values.append(current_value_sum)
update_bin_flag = True
return (windowed_time, windowed_values)
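# Worked example (illustrative): with ms_window=10, times=[3., 7., 12.] and
# values=[1., 2., 4.], the bin centred at 5 ms accumulates 1 + 2 = 3. Note that
# the trailing partial bin (holding 4.0) is only emitted once a later event
# falls past its right edge, so here the result is ([5.0], [3.0]).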
# Takes a time array (ms) and a values array measured at those times
# The values must be levels, not additive quantities. E.g. buffer occupancy.
# Returns the windowed time-average of the values array binned into
# blocks of ms_window.
# Start at time 0, and for every event, keep track of forward inter-arrival t.
# Weight values by their time-to-next sample (level is constant till then)
# Divide by total ms_window to get value for that bin.
# Going forward, to the next window, remember the old value as the
# "starting state" (do NOT reset to 0)
def windowed_time_average(times, values, ms_window, initial_value):
windowed_time = []
windowed_values = []
final_base_time = 0.0
update_bin_flag = True
k = 0
while k < len(times):
if update_bin_flag is True:
update_bin_flag = False
current_base_time = final_base_time
final_base_time = current_base_time + ms_window
current_bin_time = final_base_time - float(ms_window)/2
if k == 0:
current_value = initial_value
if times[k] <= final_base_time:
current_value_time = times[k] - current_base_time
current_sum = current_value * current_value_time
else:
current_value_time = ms_window
current_sum = current_value * current_value_time
windowed_time.append(current_bin_time)
windowed_values.append(current_sum/ms_window)
update_bin_flag = True
continue
current_value = values[k]
if (k+1) < len(times):
nexteventtime = times[k+1]
else:
nexteventtime = final_base_time + 1
if nexteventtime <= final_base_time:
current_value_time = times[k+1] - times[k]
current_sum += current_value * current_value_time
else:
current_value_time = ms_window - (times[k] - current_base_time)
current_sum += current_value * current_value_time
windowed_time.append(current_bin_time)
windowed_values.append(current_sum/ms_window)
update_bin_flag = True
k += 1
return (windowed_time, windowed_values)
'''
Confirm windowed time average function returns proper results
for simple test cases:
Test1: First window is empty
ms_window = 10 ms
data = [[11ms, 1],[12ms,2],[13ms,1], [22ms,2]]
initial_value = 0
Result Expected:
t v
5 0
15 0*0.1 + 1*0.1 + 2*0.1 + 1*0.7 = 1.0
25 1*0.2 + 2 * 0.8 = 1.8
Test2: Second window is empty, non-zero initial value
ms_window = 8 ms
data = [[6ms, 2],[17ms,5],[23ms,1]]
initial_value = 1
Result Expected:
t v
4 0.75*1 + 0.25*2 = 1.25
12 2
20 0.125*2 + 0.75*5 + 0.125*1 = 0.25 + 3.75 + 0.125 = 4.125
Last Verified on 14 Nov 2015, 11 PM, by Sushant Sundaresh
Added to unit tests
'''
def test_windowed_time_average ():
names = ["Test1", "Test2"]
args = [([11.,12.,13.,22.], [1.,2.,1.,2.], 10., 0.),\
([6.,17.,23.],[2.,5.,1.],8.,1.)]
exps = [([5.,15.,25.], [0., 1.0, 1.8]),\
([4.,12.,20.],[1.25,2.,4.125])]
passFlag = True
for j in xrange(len(names)):
# print names[j]
t, v, w, i = args[j]
te, ve = exps[j]
ta, va = windowed_time_average(t,v,w,i)
for k in xrange(len(te)):
passFlag = passFlag and (ta[k] == te[k]) and (va[k] == ve[k])
return passFlag
# Element ID must be link string ID
# Will break if no data matches the specified element in your simulation logs
def plot_bufferoccupancy(datamap, linkID, ms_window, axes):
if linkID in datamap.keys():
epsilon = 10**-7
rtl_ms_times = [val[0] for val in datamap[linkID]["bufferoccupancy"][constants.RTL]]
ltr_ms_times = [val[0] for val in datamap[linkID]["bufferoccupancy"][constants.LTR]]
rtl_frac_occupancy = [val[1] for val in datamap[linkID]["bufferoccupancy"][constants.RTL]]
ltr_frac_occupancy = [val[1] for val in datamap[linkID]["bufferoccupancy"][constants.LTR]]
rtl_t, rtl_fo = windowed_time_average(rtl_ms_times, rtl_frac_occupancy, ms_window, 0.0) # buffers start empty
ltr_t, ltr_fo = windowed_time_average(ltr_ms_times, ltr_frac_occupancy, ms_window, 0.0) # buffers start empty
rtl_t = np.array([val/1000 for val in rtl_t]) # s
ltr_t = np.array([val/1000 for val in ltr_t]) # s
rtl_fo = np.array([100*val+epsilon for val in rtl_fo]) # %
ltr_fo = np.array([100*val+epsilon for val in ltr_fo]) # %
l1, l2 = axes.semilogy(rtl_t, rtl_fo,'kx-',ltr_t,ltr_fo,'r.-')
axes.set_ylabel("Left|Right Buffer [%s Full]" % '%')
axes.legend((l1,l2), ('Right-to-Left','Left-to-Right'), 'upper right')
axes.grid(True)
'''For ms_window time-windowing, need window >> timescale of events (10x PROPDELAY for links...)'''
def plot_linkrate (datamap, linkID, ms_window, axes):
if linkID in datamap.keys():
ms_times = [val[0] for val in datamap[linkID]["linkrate"]]
mbit_transfers = [val[1] for val in datamap[linkID]["linkrate"]]
t, mb = windowed_sum(ms_times, mbit_transfers, ms_window)
t = np.array([val/1000 for val in t]) # s
mbps = np.array([1000*val / ms_window for val in mb]) # Mbps
axes.plot(t, mbps,'k.-')
axes.set_ylabel("Mbps")
axes.grid(True)
'''For ms_window time-windowing, need window >> timescale of events (10x PROPDELAY for links...)'''
def plot_flow_rate (ms,mbits,label,ms_window,axes):
t, mb = windowed_sum(ms, mbits,ms_window)
t = np.array([val/1000 for val in t]) # s
mbps = np.array([1000.0*val / ms_window for val in mb]) # Mbps
axes.plot(t, mbps,'k.-')
axes.set_ylabel(label)
axes.grid(True)
# Usually there are too many of these points to integrate quickly
def plot_flow_window(ms,pkts,label,ms_window,axes):
t, w = windowed_time_average(ms, pkts, ms_window, 1.0) # W0=1 for all dynamic TCPs
t = np.array([val/1000 for val in t]) # s
w = np.array(w) # packets
axes.plot(t, w,'k.-')
axes.set_ylabel(label)
axes.grid(True)
def plot_flow_loss (ms,pkts,label,ms_window,axes):
t, plost = windowed_sum(ms, pkts, ms_window)
t = np.array([val/1000 for val in t]) # s
plost = np.array(plost) # packets
axes.plot(t, plost,'k.-')
axes.set_ylabel(label)
plt.grid(True)
# Usually there are too many of these points to integrate quickly
def plot_flow_delay (ms,ms_delay,label,ms_window,axes):
t, d = windowed_time_average(ms, ms_delay, ms_window, 0) # delay0=0 for our simulations
t = np.array([val/1000 for val in t]) # s
d = np.array(d) # ms
axes.plot(t, d,'k.-')
axes.set_ylabel(label)
plt.grid(True)
# Reference: http://stackoverflow.com/questions/273192/in-python-check-if-a-directory-exists-and-create-it-if-necessary
def ensure_dir(f):
d = os.path.dirname(f)
if not os.path.exists(d):
os.makedirs(d)
if __name__ == "__main__":
if (len(sys.argv) == 2 and sys.argv[1] == "--help") or (len(sys.argv) != 5 or sys.argv[1] != "--testcase" or sys.argv[3] != "--tcp" or sys.argv[4] not in ["fast","vegas","reno"]):
print "Usage: python visualize.py --testcase testcase.json --tcp [fast|vegas|reno]\n"
sys.exit(1)
measurementFilename = os.path.join('results','all_measurements.txt')
testImageFilename = os.path.join(os.path.join('results','plots'), "test.jpeg")
testRawDataFilename = os.path.join(os.path.join('results','rawdata'), "test.jpeg")
ensure_dir(measurementFilename)
ensure_dir(testImageFilename)
ensure_dir(testRawDataFilename)
testCase = sys.argv[2]
tcp = sys.argv[4]
for f in os.listdir("results"):
if not os.path.isdir(os.path.join('results',f)):
print "Cleaning up... removing %s" % os.path.join('results', f)
os.remove(os.path.join('results', f))
for f in os.listdir(os.path.join('results','plots')):
print "Cleaning up... removing %s" % os.path.join(os.path.join('results','plots'), f)
os.remove(os.path.join(os.path.join('results','plots'), f))
for f in os.listdir(os.path.join('results','rawdata')):
print "Cleaning up... removing %s" % os.path.join(os.path.join('results','rawdata'), f)
os.remove(os.path.join(os.path.join('results','rawdata'), f))
print "Simulating network..."
# Run Main Loop on Test Case 1, temporarily redirecting STDOUT
# STDERR will report progress.
sys.stdout = open(measurementFilename, 'w')
element_map = MainLoop().simulate(testCase,tcp)
sys.stdout = sys.__stdout__
print "Done simulating..."
print "Parsing results..."
# element id and measurement type to data map
# keyed as ['l1']['linkrate']
eimtod = {}
# Parse out measurements from measurements file
with open(measurementFilename) as m:
for line in m:
try:
log = json.loads(line)
if log["logtype"] == "measurement":
handle_linkrate(eimtod, log)
handle_buffer_occupancy(eimtod, log)
handle_packet_loss(eimtod, log)
handle_flowrate(eimtod, log)
handle_flow_window(eimtod, log)
handle_flow_state(eimtod, log)
handle_packets_outstanding(eimtod, log)
handle_flow_reno_debug(eimtod, log)
handle_flow_true_fast_debug(eimtod, log)
handle_flow_vegas_debug(eimtod, log)
# others
except ValueError:
pass
except KeyError:
raise
# Dump parsed measurements for visual debugging
for element in eimtod.keys():
for measurement in eimtod[element].keys():
if isinstance(eimtod[element][measurement],dict):
# more layers
for dataclass in eimtod[element][measurement].keys():
# actual data
with open(os.path.join(os.path.join('results','rawdata'),\
"%s_%s_%s.txt"%(element,measurement,dataclass)),'w') as f:
f.write("time\t\tvalue\n")
for t,v in eimtod[element][measurement][dataclass]:
f.write("%0.6e\t\t%0.6e\n"%(t,v))
else:
# actual data. handle debug dumps separately
# these aren't just for debugging; they have really useful
# data. we just aren't doing anything with most of it.
with open(os.path.join(os.path.join('results','rawdata'),\
"%s_%s.txt"%(element,measurement)),'w') as f:
if measurement == "outstandingpackets":
f.write("time\t\tout\t\tleft\t\tintransit\t\tackd\t\ttotal\n")
for t,v1,v2,v3,v4,v5 in eimtod[element][measurement]:
f.write("%0.6e\t\t%d\t\t%d\t\t%d\t\t%d\t\t%d\n"%(t,v1,v2,v3,v4,v5))
elif measurement == "fullrenodebug":
f.write("time\t\tReason\t\tPacketID\t\tEPIT\t\tLPIA\t\tWS\t\tCAT\t\tSTT\t\t[L3P0\t\tL3P1\t\tL3P2]\t\tTAF\t\tDAF\t\tSAF\t\tState\t\tTimeoutOccurred\t\tRTTEst\n")
for t,SendReceive,whichPacket,EPIT,LPIA,WS,CAT,STT,L3P0,L3P1,L3P2,TAF,DAF,SAF,State,TO,RTTEst in eimtod[element][measurement]:
f.write("%0.6e\t\t%s\t\t%d\t\t%d\t\t%d\t\t%0.3e\t\t%0.3e\t\t%0.6e\t\t[%d\t\t%d\t\t%d]\t\t%s\t\t%s\t\t%s\t\t%d\t\t%s\t\t%0.6e\n"%(t,SendReceive,whichPacket,EPIT,LPIA,WS,CAT,STT,L3P0,L3P1,L3P2,TAF,DAF,SAF,State,TO,RTTEst))
elif measurement == "fullvegasdebug":
f.write("time\t\tReason\t\tPacketID\t\tEPIT\t\tLPIA\t\tWS\t\tSTT\t\t[L3P0\t\tL3P1\t\tL3P2]\t\tTAF\t\tDAF\t\tSAF\t\tState\t\tObserve\t\tRamp\t\tTimeoutOccurred\t\tRTTmin\t\tRTTAct\t\tPacketsTillCanChangeWS\n")
for t,SendReceive,whichPacket,EPIT,LPIA,WS,STT,L3P0,L3P1,L3P2,TAF,DAF,SAF,State,FlagO,FlagR,TO,RTTm,RTTa,ICAPTUW in eimtod[element][measurement]:
f.write("%0.6e\t\t%s\t\t%d\t\t%d\t\t%d\t\t%0.3e\t\t%0.6e\t\t[%d\t\t%d\t\t%d]\t\t%s\t\t%s\t\t%s\t\t%d\t\t%s\t\t%s\t\t%s\t\t%0.6e\t\t%0.6e\t\t%d\n"%(t,SendReceive,whichPacket,EPIT,LPIA,WS,STT,L3P0,L3P1,L3P2,TAF,DAF,SAF,State,FlagO,FlagR,TO,RTTm,RTTa,ICAPTUW))
elif measurement == "fulltruefastdebug":
f.write("time\t\tReason\t\tPacketID\t\tEPIT\t\tLPIA\t\tWS\t\tSTT\t\t[L3P0\t\tL3P1\t\tL3P2]\t\tTAF\t\tDAF\t\tSAF\t\tTimeoutOccurred\t\tRTTmin\t\tRTTmax\t\tRTTAct\n")
for t,SendReceive,whichPacket,EPIT,LPIA,WS,STT,L3P0,L3P1,L3P2,TAF,DAF,SAF,TO,RTTmi,RTTma,RTTac in eimtod[element][measurement]:
f.write("%0.6e\t\t%s\t\t%d\t\t%d\t\t%d\t\t%0.3e\t\t%0.6e\t\t%d\t\t%d\t\t%d\t\t%s\t\t%s\t\t%s\t\t%s\t\t%0.6e\t\t%0.6e\t\t%0.6e\n"%(t,SendReceive,whichPacket,EPIT,LPIA,WS,STT,L3P0,L3P1,L3P2,TAF,DAF,SAF,TO,RTTmi,RTTma,RTTac))
else:
f.write("time\t\tvalue\n")
for t,v in eimtod[element][measurement]:
f.write("%0.6e\t\t%0.6e\n"%(t,v))
print "Done parsing results..."
print "Plotting results..."
'''
Want to plot, for each network element for which these data are available:
1 link rate (mpbs)
1 bin-averaged
2 buffer occupancy (%)
2 time-averaged
3 packet loss (packets)
3 bin-sum
4 flow rate (Mbps)
4 bin-averaged
5 flow window size (packets)
5 time-averaged
6 packet delay (ms)
6 event trace (solid line)
All will be black lines, solid, or single points, dotted.
Plots will be totally separated.
This code below is sensitive to LACK of data. It will likely break
if any of the expected data for standard plots is not found
in your simulation for some reason (weird locked routing, etc.)
'''
ms_window = constants.MS_WINDOW
for (d,v) in element_map.items():
if isinstance(v, Link):
myname = "Link %s"%v.get_id()
print "for %s..."%myname
myid = v.get_id()
all_plots = plt.figure()
linkrate_ax = all_plots.add_subplot(211)
buffocc_ax = all_plots.add_subplot(212)
plot_linkrate(eimtod, myid, ms_window, linkrate_ax)
plot_bufferoccupancy(eimtod, myid, ms_window, buffocc_ax)
linkrate_ax.set_title("%s Trace"%myname)
buffocc_ax.set_xlabel('Seconds')
all_plots.savefig(os.path.join(os.path.join('results','plots'),"%s.jpeg"%myid))
plt.close()
elif isinstance(v,Data_Source):
myid = v.get_id()
myname = myid.split('_')[0]
print "for Flow %s..."%myname
mysink = "%s_%s"%(myname,"dest") # see jsonparser.py
all_data = []
pltCount = 0
plot_functions = []
if isinstance(v, Working_Data_Source_TCP_RENO):
# guaranteed to have this data
mydata = eimtod[myid]["fullrenodebug"]
mytimes = [val[0] for val in mydata] # ms
myWS = [val[5] for val in mydata] # packets
myDelay = [val[16] for val in mydata] # ms
elif isinstance(v, Working_Data_Source_TCP_VEGAS):
# guaranteed to have this data
mydata = eimtod[myid]["fullvegasdebug"]
mytimes = [val[0] for val in mydata] # ms
myWS = [val[5] for val in mydata] # packets
myDelay = [val[18] for val in mydata] # ms
elif isinstance(v, Working_Data_Source_TCP_FAST):
# guaranteed to have this data
mydata = eimtod[myid]["fulltruefastdebug"]
mytimes = [val[0] for val in mydata] # ms
myWS = [val[5] for val in mydata] # packets
myDelay = [val[16] for val in mydata] # ms
plot_functions.append(lambda ((ms,dat,label,ms_window,axes)): plot_flow_window(ms,dat,label,ms_window,axes))
plot_functions.append(lambda ((ms,dat,label,ms_window,axes)): plot_flow_delay(ms,dat,label,ms_window,axes))
all_data.append([mytimes,myWS,'Window (pkts)'])
all_data.append([mytimes,myDelay,'RTT (ms)'])
pltCount += 2
pkLossFlag = False
if "packetloss" in eimtod[myid].keys():
mydata = eimtod[myid]["packetloss"]
myLossTime = [val[0] for val in mydata] # ms
myLoss = [val[1] for val in mydata] # 0, 1
all_data.append([myLossTime,myLoss,"Loss (pkts)"])
pltCount += 1
pkLossFlag = True
plot_functions.append(lambda ((ms,dat,label,ms_window,axes)): plot_flow_loss(ms,dat,label,ms_window,axes))
if "flowrate" in eimtod[mysink].keys():
mydata = eimtod[mysink]["flowrate"]
myRateTime = [val[0] for val in mydata] # ms
myRate = [val[1] for val in mydata] # mbits
all_data.append([myRateTime,myRate,"Mbps"])
pltCount += 1
plot_functions.append(lambda ((ms,dat,label,ms_window,axes)): plot_flow_rate(ms,dat,label,ms_window,axes))
all_plots = plt.figure()
myaxes = []
flow_ws_ax = all_plots.add_subplot(pltCount,1,1)
myaxes.append(flow_ws_ax)
flow_delay_ax = all_plots.add_subplot(pltCount,1,2)
myaxes.append(flow_delay_ax)
if pltCount == 3 and pkLossFlag:
flow_loss_ax = all_plots.add_subplot(pltCount,1,3)
myaxes.append(flow_loss_ax)
elif pltCount == 3:
flow_rate_ax = all_plots.add_subplot(pltCount,1,3)
myaxes.append(flow_rate_ax)
elif pltCount > 3:
flow_loss_ax = all_plots.add_subplot(pltCount,1,3)
myaxes.append(flow_loss_ax)
flow_rate_ax = all_plots.add_subplot(pltCount,1,4)
myaxes.append(flow_rate_ax)
for m in xrange(pltCount):
plot_functions[m]((all_data[m][0],all_data[m][1],all_data[m][2],ms_window,myaxes[m]))
myaxes[0].set_title("%s Trace"%myname)
myaxes[len(myaxes)-1].set_xlabel('Seconds')
all_plots.savefig(os.path.join(os.path.join('results','plots'),"%s.jpeg"%myname))
plt.close()
else:
continue
print "Done plotting results..."
print "Goodbye!"
sys.exit(0) | sssundar/NetworkSimulator | Code/Python/visualize.py | Python | gpl-2.0 | 25,750 |
from logging import FileHandler
from logging import Formatter
from logging.handlers import RotatingFileHandler
from collector.api.app import app
def get_file_handler():
if app.config.get('LOG_ROTATION'):
file_handler = RotatingFileHandler(
app.config.get('LOG_FILE'),
maxBytes=app.config.get('LOG_FILE_SIZE'),
            backupCount=app.config.get('LOG_FILES_COUNT')
)
else:
file_handler = FileHandler(app.config.get('LOG_FILE'))
file_handler.setLevel(app.config.get('LOG_LEVEL'))
formatter = get_formatter()
file_handler.setFormatter(formatter)
return file_handler
def get_formatter():
DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
LOG_FORMAT = "%(asctime)s.%(msecs)03d %(levelname)s " \
"[%(thread)x] (%(module)s) %(message)s"
return Formatter(fmt=LOG_FORMAT, datefmt=DATE_FORMAT)
def init_logger():
app.logger.addHandler(get_file_handler())
| sand8080/collector | collector/collector/api/log.py | Python | gpl-2.0 | 929 |
r"""Functions for $\tau\to \ell \nu\nu$ decays."""
import flavio
from math import log, sqrt, pi
def F(x):
return 1 - 8*x + 8*x**3 - x**4 - 12*x**2*log(x)
def G(x):
return 1 + 9*x - 9*x**2 - x**3 + 6*x*log(x) + 6*x**2*log(x)
def _BR(x, CL, CR):
return F(x) * (abs(CL)**2 + abs(CR)**2) - 4 * G(x) * (CL * CR.conjugate()).real
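# F(x) is the standard phase-space factor for l -> l' nu nu with x = m_l'^2/m_l^2;
# G(x) weights the left-right interference term. In the SM limit CR = 0, _BR
# reduces to F(x)*|CL|^2, which is what the ratio in GFeff below normalizes against.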
def GFeff(wc_obj, par):
r"""Effective Fermi constant in the presence of new physics."""
scale = flavio.config['renormalization scale']['mudecays']
wc = wc_obj.get_wc('nunumue', scale, par, eft='WET-3')
CL = wc['CVLL_numunueemu']
CR = wc['CVLR_numunueemu']
me = par['m_e']
mmu = par['m_mu']
GF = par['GF']
x = me**2 / mmu**2
CLSM = -4 * GF / sqrt(2)
r = _BR(x, CL + CLSM, CR) / _BR(x, CLSM, 0)
return GF / sqrt(r)
def BR_taulnunu(wc_obj, par, lep, nu1, nu2):
r"""BR of $\tau\to l nu_1\bar nu_2$ for specific neutrino flavours"""
if lep == 'e':
sec = 'nunutaue'
elif lep == 'mu':
sec = 'nunumutau'
scale = flavio.config['renormalization scale']['taudecays']
wc = wc_obj.get_wc(sec, scale, par, eft='WET-4')
ml = par['m_' + lep]
mtau = par['m_tau']
x = ml**2 / mtau**2
nnll = 'nu{}nu{}tau{}'.format(nu2, nu1, lep)
try:
CL = wc['CVLL_' + nnll]
CR = wc['CVLR_' + nnll]
except KeyError:
nnll = 'nu{}nu{}{}tau'.format(nu1, nu2, lep)
CL = wc['CVLL_' + nnll].conjugate()
CR = wc['CVLR_' + nnll].conjugate()
if nu1 == 'tau' and nu2 == lep:
# SM contribution, taking into account NP in mu->enunu!
GF = GFeff(wc_obj, par)
CL += -4 * GF / sqrt(2)
pre = par['tau_tau'] / 3 / 2**9 / pi**3 * mtau**5
alpha_e = flavio.physics.running.running.get_alpha_e(par, scale, nf_out=4)
# eq. (3) of arXiv:1310.7922
flavio.citations.register("Pich:2013lsa")
emcorr = 1 + alpha_e / (2 * pi) * (25 / 4 - pi**2)
return pre * _BR(x, CL, CR) * emcorr
def BR_taulnunu_summed(wc_obj, par, lep):
"""BR of tau->lnunu summed over neutrino flavours"""
_l = ['e', 'mu', 'tau']
return sum([BR_taulnunu(wc_obj, par, lep, nu1, nu2) for nu1 in _l for nu2 in _l])
# function returning function needed for prediction instance
def br_taulnunu(lep):
def f(wc_obj, par):
return BR_taulnunu_summed(wc_obj, par, lep)
return f
# Observable and Prediction instances
_tex = {'e': 'e', 'mu': r'\mu'}
for lep in _tex:
_process_tex = r"\tau^-\to " + _tex[lep] + r"^- \nu\bar\nu"
_process_taxonomy = r'Process :: $\tau$ lepton decays :: Leptonic tree-level decays :: $\tau\to \ell\nu\bar\nu$ :: $' + _process_tex + r"$"
_obs_name = "BR(tau->" + lep + "nunu)"
_obs = flavio.classes.Observable(_obs_name)
_obs.set_description(r"Branching ratio of $" + _process_tex + r"$")
_obs.tex = r"$\text{BR}(" + _process_tex + r")$"
_obs.add_taxonomy(_process_taxonomy)
flavio.classes.Prediction(_obs_name, br_taulnunu(lep))
| flav-io/flavio | flavio/physics/taudecays/taulnunu.py | Python | mit | 2,969 |
# Copyright (C) 2012 Prayush Kumar
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
import lal
import numpy
from numpy import sqrt, log, float128
from pycuda.elementwise import ElementwiseKernel
from pycbc.libutils import pkg_config_header_strings
from pycbc.types import FrequencySeries, zeros, Array, complex64
preamble = """
#include <lal/LALConstants.h>
"""
phenomC_text = """
/* ********* Main paper : Phys Rev D82, 064016 (2010) ********* */
const double f = (double) (i + kmin ) * delta_f;
const double fd = (double) m_sec * f;
const double v = (double) cbrt(piM*f);
const double v2 = v * v;
const double v3 = v * v * v;
const double v4 = v2 * v2;
const double v5 = v2 * v3;
const double v6 = v3 * v3;
const double v7 = v3 * v4;
const double w = (double) cbrt( m_sec * f );
const double w3 = (double) w * w * w;
/* ******************************************************* */
/* *********************** Phasing *********************** */
/* This is defined in Eq 5.1 - 5.9, 3.13 of the main paper */
/* ******************************************************* */
double phSPA = 1. + pfa2 * v2 + pfa3 * v3 + pfa4 * v4 +
(1. + log(v3)) * pfa5 * v5 + (pfa6 + pfa6log * log(v3))*v6 +
pfa7 * v7;
phSPA *= (pfaN / v5);
phSPA -= (LAL_PI/4.0);
double phPM = (a1/(w3 * w * w)) + (a2/w3) + (a3/w) + a4 + (a5 * w * w) +(a6 * w3);
phPM /= eta;
double phRD = b1 + b2*fd;
double wPlusf1 = 0.5*(1. + tanh( (4*(fd - Mf1)/d1) ));
double wMinusf1 = 0.5*(1. - tanh( (4*(fd - Mf1)/d1) ));
double wPlusf2 = 0.5*(1. + tanh( (4*(fd - Mf2)/d2) ));
double wMinusf2 = 0.5*(1. - tanh( (4*(fd - Mf2)/d2) ));
double phasing = (phSPA * ((double) wMinusf1)) + (phPM * ((double) wPlusf1 * wMinusf2)) +
(phRD * ((double) wPlusf2));
/* ******************************************************* */
/* ********************** Amplitude **************** */
/* *** This is defined in Eq 5.11 - 5.13, 3.10, 3.6 ****** */
/* ******************************************************* */
double xdot = 1. + xdota2 * v2 + xdota3 * v3 + xdota4 * v4 + xdota5 * v5 +
(xdota6 + xdota6log * log(v2)) * v6 + xdota7 * v7;
xdot *= (xdotaN * v5 * v5);
double omgdot = 0.0, ampfac = 0.0;
double ampSPA = 0.0, ampSPAre = 0.0, ampSPAim = 0.0;
/* If xdot becomes negative, take ampSPA = 0.0 */
/* This is valid because it becomes negative much after ISCO */
if( xdot > 0.0 )
{
omgdot = 1.5 * v * xdot;
ampfac = sqrt( LAL_PI / omgdot );
ampSPAre = ampfac * AN * v2 * (1. + A2 * v2 + A3 * v3 + A4 * v4 +
A5 * v5 + (A6 + A6log * log(v2)) * v6);
ampSPAim = ampfac * AN * v2 * (A5imag * v5 + A6imag * v6);
ampSPA = sqrt( ampSPAre * ampSPAre + ampSPAim * ampSPAim );
}
double ampPM = ampSPA + (g1 * pow(fd, 5./6.));
const double sig = Mfrd * del2 / Q;
double sig2 = sig * sig;
double L = sig2 / ((fd - Mfrd) * (fd - Mfrd) + sig2/4.);
double ampRD = del1 * L * pow( fd, -7./6.);
double wPlusf0 = 0.5*(1. + tanh( (4*(fd - Mf0)/d0) ));
double wMinusf0 = 0.5*(1. - tanh( (4*(fd - Mf0)/d0) ));
double amplitude = (ampPM * ((double) wMinusf0)) + (ampRD * ((double) wPlusf0));
amplitude /= distance;
/* ************** htilde **************** */
htilde[i]._M_re = amplitude * cos( phasing );
htilde[i]._M_im = -1.0 * amplitude * sin( phasing );
"""
phenomC_kernel = ElementwiseKernel("""pycuda::complex<double> *htilde, int kmin, double delta_f,
double eta, double Xi, double distance,
double m_sec, double piM, double Mfrd,
double pfaN, double pfa2, double pfa3, double pfa4,
double pfa5, double pfa6, double pfa6log, double pfa7,
double a1, double a2, double a3, double a4,
double a5, double a6, double b1, double b2,
double Mf1, double Mf2, double Mf0,
double d1, double d2, double d0,
double xdota2, double xdota3, double xdota4,
double xdota5, double xdota6, double xdota6log,
double xdota7, double xdotaN, double AN,
double A2, double A3, double A4, double A5,
double A5imag, double A6, double A6log, double A6imag,
double g1, double del1, double del2, double Q""",
phenomC_text, "phenomC_kernel",
preamble=preamble, options=pkg_config_header_strings(['lal']))
def FinalSpin( Xi, eta ):
"""Computes the spin of the final BH that gets formed after merger. This is done usingn Eq 5-6 of arXiv:0710.3345"""
s4 = -0.129
s5 = -0.384
t0 = -2.686
t2 = -3.454
t3 = 2.353
etaXi = eta * Xi
eta2 = eta*eta
finspin = (Xi + s4*Xi*etaXi + s5*etaXi*eta + t0*etaXi + 2.*(3.**0.5)*eta + t2*eta2 + t3*eta2*eta)
if finspin > 1.0:
raise ValueError("Value of final spin > 1.0. Aborting")
else:
return finspin
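# Illustrative sanity check (not part of the original module): for an
# equal-mass, non-spinning binary (Xi = 0, eta = 0.25) the fit reduces to
#   2.*sqrt(3.)*0.25 + t2*0.25**2 + t3*0.25**3 ~ 0.866 - 0.216 + 0.037 ~ 0.687,
# the familiar ~0.69 final spin of such mergers.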
def fRD( a, M):
"""Calculate the ring-down frequency for the final Kerr BH. Using Eq. 5.5 of Main paper"""
f = (lal.C_SI**3.0 / (2.0*lal.PI*lal.G_SI*M*lal.MSUN_SI)) * (1.5251 - 1.1568*(1.0-a)**0.1292)
return f
def Qa( a ):
"""Calculate the quality factor of ring-down, using Eq 5.6 of Main paper"""
return (0.7 + 1.4187*(1.0-a)**-0.4990)
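# Rough orientation values (assumed, not from the source): for the a ~ 0.687
# remnant above and M = 20 solar masses, fRD(a, M) ~ 855 Hz and Qa(a) ~ 3.2.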
# The tanh windows of Eq 5.8 of the main paper are evaluated inside the CUDA kernel above.
def imrphenomc_tmplt(**kwds):
""" Return an IMRPhenomC waveform using CUDA to generate the phase and amplitude
Main Paper: arXiv:1005.3306
"""
# Pull out the input arguments
f_min = float128(kwds['f_lower'])
f_max = float128(kwds['f_final'])
delta_f = float128(kwds['delta_f'])
distance = float128(kwds['distance'])
mass1 = float128(kwds['mass1'])
mass2 = float128(kwds['mass2'])
spin1z = float128(kwds['spin1z'])
spin2z = float128(kwds['spin2z'])
if 'out' in kwds:
out = kwds['out']
else:
out = None
# Calculate binary parameters
M = mass1 + mass2
eta = mass1 * mass2 / (M * M)
Xi = (mass1 * spin1z / M) + (mass2 * spin2z / M)
Xisum = 2.*Xi
Xiprod = Xi*Xi
Xi2 = Xi*Xi
    m_sec = M * lal.MTSUN_SI
    piM = lal.PI * m_sec
    # The input distance is given in Mpc; convert to SI and fold in the
    # overall amplitude normalization.
    distance *= (1.0e6 * lal.PC_SI / (2. * sqrt(5. / (64.*lal.PI)) * M * lal.MRSUN_SI * M * lal.MTSUN_SI))
# Check if the value of f_max is correctly given, else replace with the fCut
# used in the PhenomB code in lalsimulation. The various coefficients come
# from Eq.(4.18) of http://arxiv.org/pdf/0710.2335 and
# Table I of http://arxiv.org/pdf/0712.0343
if not f_max:
f_max = (1.7086 * eta * eta - 0.26592 * eta + 0.28236) / piM
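        # (Illustrative check: for eta = 0.25 the numerator is ~0.323, i.e. a
        # dimensionless cutoff Mf = m_sec*f_max ~ 0.103, just above the
        # remnant's ringdown Mf ~ 0.084.)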
# Transform the eta, chi to Lambda parameters, using Eq 5.14, Table II of Main
# paper.
z101 = -2.417e-03
z102 = -1.093e-03
z111 = -1.917e-02
z110 = 7.267e-02
z120 = -2.504e-01
z201 = 5.962e-01
z202 = -5.600e-02
z211 = 1.520e-01
z210 = -2.970e+00
z220 = 1.312e+01
z301 = -3.283e+01
z302 = 8.859e+00
z311 = 2.931e+01
z310 = 7.954e+01
z320 = -4.349e+02
z401 = 1.619e+02
z402 = -4.702e+01
z411 = -1.751e+02
z410 = -3.225e+02
z420 = 1.587e+03
z501 = -6.320e+02
z502 = 2.463e+02
z511 = 1.048e+03
z510 = 3.355e+02
z520 = -5.115e+03
z601 = -4.809e+01
z602 = -3.643e+02
z611 = -5.215e+02
z610 = 1.870e+03
z620 = 7.354e+02
z701 = 4.149e+00
z702 = -4.070e+00
z711 = -8.752e+01
z710 = -4.897e+01
z720 = 6.665e+02
z801 = -5.472e-02
z802 = 2.094e-02
z811 = 3.554e-01
z810 = 1.151e-01
z820 = 9.640e-01
z901 = -1.235e+00
z902 = 3.423e-01
z911 = 6.062e+00
z910 = 5.949e+00
z920 = -1.069e+01
eta2 = eta*eta
Xi2 = Xiprod
# Calculate alphas, gamma, deltas from Table II and Eq 5.14 of Main paper
a1 = z101 * Xi + z102 * Xi2 + z111 * eta * Xi + z110 * eta + z120 * eta2
a2 = z201 * Xi + z202 * Xi2 + z211 * eta * Xi + z210 * eta + z220 * eta2
a3 = z301 * Xi + z302 * Xi2 + z311 * eta * Xi + z310 * eta + z320 * eta2
a4 = z401 * Xi + z402 * Xi2 + z411 * eta * Xi + z410 * eta + z420 * eta2
a5 = z501 * Xi + z502 * Xi2 + z511 * eta * Xi + z510 * eta + z520 * eta2
a6 = z601 * Xi + z602 * Xi2 + z611 * eta * Xi + z610 * eta + z620 * eta2
g1 = z701 * Xi + z702 * Xi2 + z711 * eta * Xi + z710 * eta + z720 * eta2
del1 = z801 * Xi + z802 * Xi2 + z811 * eta * Xi + z810 * eta + z820 * eta2
del2 = z901 * Xi + z902 * Xi2 + z911 * eta * Xi + z910 * eta + z920 * eta2
# Get the spin of the final BH
afin = FinalSpin( Xi, eta )
Q = Qa( abs(afin) )
# Get the fRD
frd = fRD( abs(afin), M)
Mfrd = frd * m_sec
# Define the frequencies where SPA->PM->RD
f1 = 0.1 * frd
Mf1 = m_sec * f1
f2 = frd
Mf2 = m_sec * f2
d1 = 0.005
d2 = 0.005
f0 = 0.98 * frd
Mf0 = m_sec * f0
d0 = 0.015
    # Now use this frequency to calculate beta1 and beta2, which appear in
    # Eq 5.7 of the main paper.
b2 = ((-5./3.)* a1 * pow(Mfrd,(-8./3.)) - a2/(Mfrd*Mfrd) - \
(a3/3.)*pow(Mfrd,(-4./3.)) + (2./3.)* a5 * pow(Mfrd,(-1./3.)) + a6)/eta
psiPMrd = (a1 * pow(Mfrd,(-5./3.)) + a2/Mfrd + a3 * pow(Mfrd,(-1./3.)) + \
a4 + a5 * pow(Mfrd,(2./3.)) + a6 * Mfrd)/eta
b1 = psiPMrd - (b2 * Mfrd)
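    # Equivalently: b2 is d(psiPM)/dMf evaluated at Mfrd and b1 pins the value
    # there, so phRD = b1 + b2*fd is the tangent line to the PM phase at the
    # ringdown frequency, keeping the blended phase smooth across Mf2.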
### Calculate the PN coefficients, Eq A3 - A5 of main paper ###
pfaN = 3.0/(128.0 * eta)
pfa2 = (3715./756.) + (55.*eta/9.0)
pfa3 = -16.0*lal.PI + (113./3.)*Xi - 38.*eta*Xisum/3.
pfa4 = (152.93365/5.08032) - 50.*Xi2 + eta*(271.45/5.04 + 1.25*Xiprod) + \
3085.*eta2/72.
pfa5 = lal.PI*(386.45/7.56 - 65.*eta/9.) - \
Xi*(735.505/2.268 + 130.*eta/9.) + Xisum*(1285.0*eta/8.1 + 170.*eta2/9.) - \
10.*Xi2*Xi/3. + 10.*eta*Xi*Xiprod
pfa6 = 11583.231236531/4.694215680 - 640.0*lal.PI*lal.PI/3. - \
6848.0*lal.GAMMA/21. - 684.8*log(64.)/6.3 + \
eta*(2255.*lal.PI*lal.PI/12. - 15737.765635/3.048192) + \
76.055*eta2/1.728 - (127.825*eta2*eta/1.296) + \
2920.*lal.PI*Xi/3. - (175. - 1490.*eta)*Xi2/3. - \
(1120.*lal.PI/3. - 1085.*Xi/3.)*eta*Xisum + \
(269.45*eta/3.36 - 2365.*eta2/6.)*Xiprod
pfa6log = -6848./63.
pfa7 = lal.PI*(770.96675/2.54016 + 378.515*eta/1.512 - 740.45*eta2/7.56) - \
Xi*(20373.952415/3.048192 + 1509.35*eta/2.24 - 5786.95*eta2/4.32) + \
Xisum*(4862.041225*eta/1.524096 + 1189.775*eta2/1.008 - 717.05*eta2*eta/2.16 - 830.*eta*Xi2/3. + 35.*eta2*Xiprod/3.) - \
560.*lal.PI*Xi2 + 20.*lal.PI*eta*Xiprod + \
Xi2*Xi*(945.55/1.68 - 85.*eta) + Xi*Xiprod*(396.65*eta/1.68 + 255.*eta2)
xdotaN = 64.*eta/5.
xdota2 = -7.43/3.36 - 11.*eta/4.
xdota3 = 4.*lal.PI - 11.3*Xi/1.2 + 19.*eta*Xisum/6.
xdota4 = 3.4103/1.8144 + 5*Xi2 + eta*(13.661/2.016 - Xiprod/8.) + 5.9*eta2/1.8
xdota5 = -lal.PI*(41.59/6.72 + 189.*eta/8.) - Xi*(31.571/1.008 - 116.5*eta/2.4) + \
Xisum*(21.863*eta/1.008 - 79.*eta2/6.) - 3*Xi*Xi2/4. + \
9.*eta*Xi*Xiprod/4.
xdota6 = 164.47322263/1.39708800 - 17.12*lal.GAMMA/1.05 + \
16.*lal.PI*lal.PI/3 - 8.56*log(16.)/1.05 + \
eta*(45.1*lal.PI*lal.PI/4.8 - 561.98689/2.17728) + \
5.41*eta2/8.96 - 5.605*eta*eta2/2.592 - 80.*lal.PI*Xi/3. + \
eta*Xisum*(20.*lal.PI/3. - 113.5*Xi/3.6) + \
Xi2*(64.153/1.008 - 45.7*eta/3.6) - \
Xiprod*(7.87*eta/1.44 - 30.37*eta2/1.44)
xdota6log = -856./105.
xdota7 = -lal.PI*(4.415/4.032 - 358.675*eta/6.048 - 91.495*eta2/1.512) - \
Xi*(252.9407/2.7216 - 845.827*eta/6.048 + 415.51*eta2/8.64) + \
Xisum*(158.0239*eta/5.4432 - 451.597*eta2/6.048 + 20.45*eta2*eta/4.32 + 107.*eta*Xi2/6. - 5.*eta2*Xiprod/24.) + \
12.*lal.PI*Xi2 - Xi2*Xi*(150.5/2.4 + eta/8.) + \
Xi*Xiprod*(10.1*eta/2.4 + 3.*eta2/8.)
AN = 8.*eta*sqrt(lal.PI/5.)
A2 = (-107. + 55.*eta)/42.
A3 = 2.*lal.PI - 4.*Xi/3. + 2.*eta*Xisum/3.
A4 = -2.173/1.512 - eta*(10.69/2.16 - 2.*Xiprod) + 2.047*eta2/1.512
A5 = -10.7*lal.PI/2.1 + eta*(3.4*lal.PI/2.1)
A5imag = -24.*eta
A6 = 270.27409/6.46800 - 8.56*lal.GAMMA/1.05 + \
2.*lal.PI*lal.PI/3. + \
eta*(4.1*lal.PI*lal.PI/9.6 - 27.8185/3.3264) - \
20.261*eta2/2.772 + 11.4635*eta*eta2/9.9792 - \
4.28*log(16.)/1.05
A6log = -428./105.
A6imag = 4.28*lal.PI/1.05
### Define other parameters needed by waveform generation ###
kmin = int(f_min / delta_f)
kmax = int(f_max / delta_f)
    n = kmax + 1
if not out:
htilde = FrequencySeries(zeros(n,dtype=numpy.complex128), delta_f=delta_f, copy=False)
else:
if type(out) is not Array:
raise TypeError("Output must be an instance of Array")
if len(out) < kmax:
raise TypeError("Output array is too small")
if out.dtype != complex64:
raise TypeError("Output array is the wrong dtype")
htilde = FrequencySeries(out, delta_f=delta_f, copy=False)
phenomC_kernel(htilde.data[kmin:kmax], kmin, delta_f, eta, Xi, distance,
m_sec, piM, Mfrd,
pfaN, pfa2, pfa3, pfa4, pfa5, pfa6, pfa6log, pfa7,
a1, a2, a3, a4, a5, a6, b1, b2,
Mf1, Mf2, Mf0, d1, d2, d0,
xdota2, xdota3, xdota4, xdota5, xdota6, xdota6log,
xdota7, xdotaN, AN, A2, A3, A4, A5,
A5imag, A6, A6log, A6imag,
g1, del1, del2, Q )
hp = htilde
hc = htilde * 1j
return hp, hc
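# Example invocation (an illustrative sketch only: the argument values below
# are assumptions, and a working pycuda/CUDA context must already exist):
#
#   hp, hc = imrphenomc_tmplt(f_lower=20.0, f_final=0.0, delta_f=0.25,
#                             distance=100.0, mass1=10.0, mass2=10.0,
#                             spin1z=0.0, spin2z=0.0)
#
# A falsy f_final triggers the fCut fallback computed above.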
| hagabbar/pycbc_copy | pycbc/waveform/pycbc_phenomC_tmplt.py | Python | gpl-3.0 | 14,998 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('reddit', '0038_auto_20160708_0513'),
]
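    # Note: AlterModelOptions only rewrites Django's in-memory model state
    # (default ordering of Submission by created_at); it issues no SQL.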
operations = [
migrations.AlterModelOptions(
name='submission',
options={'ordering': ('created_at',)},
),
]
| kiwiheretic/logos-v2 | reddit/migrations/0039_auto_20160708_0543.py | Python | apache-2.0 | 379 |
# coding=utf8
from setuptools import setup, find_packages
from youdao.config import __author__, __version__
setup(
name='YoudaoDict',
version=__version__,
keywords=('youdao', 'dict', 'partly offline dict', 'web spider'),
description="通过有道爬虫查询单词",
license='MIT',
author=__author__,
author_email='[email protected]',
url="https://github.com/hellflame/youdao",
packages=find_packages(),
install_requires=[
'beautifulsoup4',
'requests',
'gevent'
],
platforms="UNIX like",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
"Environment :: Console",
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX'
],
entry_points={
'console_scripts': [
'youdao=youdao.run:main',
'service.youdao=youdao.service.run:main'
]
}
)
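# Once installed (e.g. via `pip install .`), the entry points above should
# expose two console commands, `youdao` and `service.youdao`; their CLI
# behaviour lives in youdao/run.py and youdao/service/run.py (not shown here).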
| hellflame/youdao | setup.py | Python | mit | 1,066 |
import os, time
from autotest.client.shared import error
from virttest import virsh, aexpect, utils_libvirtd
def run_virsh_edit(test, params, env):
"""
Test command: virsh edit.
The command can edit XML configuration for a domain
    1. Prepare the test environment: destroy or suspend a VM.
    2. When libvirtd == "off", stop the libvirtd service.
    3. Perform the virsh edit operation.
    4. Recover the test environment.
    5. Confirm the test result.
"""
vm_name = params.get("main_vm")
vm = env.get_vm(vm_name)
domid = vm.get_id()
domuuid = vm.get_uuid()
libvirtd = params.get("libvirtd", "on")
vm_ref = params.get("edit_vm_ref")
status_error = params.get("status_error")
def modify_vcpu(source, edit_cmd):
"""
        Modify the VM's vcpu information.
        @param source: virsh edit's option (domain name, id or uuid).
        @param edit_cmd: the edit command line to send to the editor.
        @return: True if the edit succeeded, False if it failed.
"""
session = aexpect.ShellSession("sudo -s")
try:
session.sendline("export EDITOR=vi")
session.sendline("virsh edit %s" % source)
session.sendline(edit_cmd)
session.send('\x1b')
session.send('ZZ')
            # use sleep(1) to make sure the modification has completed.
time.sleep(1)
session.close()
return True
        except Exception:
            return False
def edit_vcpu(source, guest_name):
"""
        Modify the VM's vcpu information via the virsh edit command.
        @param source: virsh edit's option (domain name, id or uuid).
        @param guest_name: the VM's name.
        @return: True if the edit succeeded, False if it failed.
"""
dic_mode = {"edit" : ":%s /1<\/vcpu>/2<\/vcpu>",
"recover" : ":%s /2<\/vcpu>/1<\/vcpu>"}
status = modify_vcpu(source, dic_mode["edit"])
if not status :
return status
if params.get("paused_after_start_vm") == "yes":
virsh.resume(guest_name, ignore_status=True)
virsh.destroy(guest_name)
elif params.get("start_vm") == "yes":
virsh.destroy(guest_name)
vcpus = vm.dominfo()["CPU(s)"]
        # Recover vcpu info
status = modify_vcpu(source, dic_mode["recover"])
if status and vcpus != '2':
return False
return status
#run test case
xml_file = os.path.join(test.tmpdir, 'tmp.xml')
virsh.dumpxml(vm_name, extra="", to_file=xml_file)
if libvirtd == "off":
utils_libvirtd.libvirtd_stop()
try:
if vm_ref == "id":
status = edit_vcpu(domid, vm_name)
elif vm_ref == "uuid":
status = edit_vcpu(domuuid, vm_name)
elif vm_ref == "name" and status_error == "no":
status = edit_vcpu(vm_name, vm_name)
else:
status = False
if vm_ref.find("invalid") != -1:
vm_ref = params.get(vm_ref)
elif vm_ref == "name":
vm_ref = "%s %s" % (vm_name, params.get("edit_extra_param"))
edit_status = virsh.edit(vm_ref).exit_status
if edit_status == 0:
status = True
    except Exception:
        status = False
    # Recover: restart the libvirtd service
if libvirtd == "off":
utils_libvirtd.libvirtd_start()
#Recover VM
if vm.is_alive():
vm.destroy()
virsh.undefine(vm_name)
virsh.define(xml_file)
#check status_error
if status_error == "yes":
if status:
raise error.TestFail("Run successfully with wrong command!")
elif status_error == "no":
if not status:
raise error.TestFail("Run failed with right command")
| sathnaga/virt-test | libvirt/tests/src/virsh_cmd/domain/virsh_edit.py | Python | gpl-2.0 | 3,714 |
class Character():
# Create a character
def __init__(self, char_name, char_description):
self.name = char_name
self.description = char_description
self.conversation = None
# Describe this character
def describe(self):
print( self.name + " is here!" )
print( self.description )
# Set what this character will say when talked to
def set_conversation(self, conversation):
self.conversation = conversation
# Talk to this character
def talk(self):
if self.conversation is not None:
print("[" + self.name + " says]: " + self.conversation)
else:
print(self.name + " doesn't want to talk to you")
# Fight with this character
def fight(self, combat_item):
print(self.name + " doesn't want to fight with you")
return True
class Enemy(Character):
def __init__(self,char_name,char_description):
super().__init__(char_name,char_description)
self.weakness = None
def fight(self, combat_item):
if combat_item == self.weakness:
print("You fend " + self.name + " off with the " + combat_item)
return True
else:
print(self.name + " crushes you, puny adventurer!")
return False
def set_weakness(self, item_weakness):
self.weakness = item_weakness
def get_weakness(self):
return self.weakness
class Friend(Character):
def __init__(self,char_name,char_description):
super().__init__(char_name,char_description)
self.feelings = None
def set_feelings(self, character_feelings):
self.feelings = character_feelings
def get_feelings(self):
return self.feelings
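# Minimal usage sketch (illustrative, not part of the original module):
#
#   dave = Enemy("Dave", "A smelly zombie")
#   dave.set_weakness("cheese")
#   dave.talk()           # prints: Dave doesn't want to talk to you
#   dave.fight("cheese")  # prints: You fend Dave off with the cheese -> True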
| darrell24015/FutureLearn | Python/Week3/character.py | Python | gpl-3.0 | 1,637 |
from django.conf.urls import include
from django.conf.urls import url
from skillboards import views
urlpatterns = [
url(r'^boards$', views.board_list),
url(r'^boards/(?P<board_name>[a-zA-Z0-9_-]+)/', include([
url(r'^$', views.board_detail),
url(r'^players/', include([
url(r'^$', views.player_list),
url(r'^(?P<username>[a-zA-Z0-9_-]+)$', views.player_detail),
url(r'^(?P<username>[a-zA-Z0-9_-]+)/recent_game$', views.player_recent_game),
])),
url(r'^register$', views.register),
url(r'^full_game$', views.game),
])),
url(r'^poke$', views.poke),
]
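# Assuming this module is mounted at the site root, the nesting above yields
# URLs such as:
#   /boards
#   /boards/<board_name>/
#   /boards/<board_name>/players/<username>/recent_game
#   /poke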
| Lucretiel/SkillServe | skillboards/urls.py | Python | gpl-3.0 | 643 |
#!/usr/bin/env python
# Copyright (C) 2012 Andrea Valle
#
# This file is part of swgit.
#
# swgit is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# swgit is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with swgit. If not, see <http://www.gnu.org/licenses/>.
import unittest
from test_base import *
from _utils import *
from _git__utils import *
from _swgit__utils import *
class Test_Tag( Test_Base ):
TAG_CLONE_DIR = SANDBOX + "TEST_TAG_CLONE/"
TAG_CLONE_DIR_2 = SANDBOX + "TEST_TAG_CLONE_2/"
TAG_REPO_DIR = SANDBOX + "TEST_TAG_REPO/"
BRANCH_NAME = "prova_tag"
ORIG_MOD_BRANCH = "orig_modbr"
ORIG_MOD_FULL_BRANCH = "%s/%s/%s/FTR/%s" % ( TEST_REPO_R, TEST_REPO_S, ORIG_REPO_GITUSER, ORIG_MOD_BRANCH )
ORIG_MOD_DEV = "%s/DEV/000" % ( ORIG_MOD_FULL_BRANCH )
DDTS = "Issue12345"
CREATED_BR = "%s/%s/%s/FTR/%s" % ( ORIG_REPO_REL, ORIG_REPO_SUBREL, TEST_USER, BRANCH_NAME )
CREATED_BR_NEWBR = "%s/NEW/BRANCH" % ( CREATED_BR )
CREATED_BR_DEV = "%s/DEV/000" % ( CREATED_BR )
CREATED_BR_FIX = "%s/FIX/%s" % ( CREATED_BR, DDTS )
REMOTE_2_NAME = "aRemote"
REMOTE_2_URL = "%s%s" % (REPO_SSHACCESS, TAG_CLONE_DIR_2)
def setUp( self ):
if hasattr( self, '_TestCase__testMethodName' ): #older python
initlogs( self._TestCase__testMethodName + ".log" )
elif hasattr( self, '_testMethodName' ): #newer python
initlogs( self._testMethodName + ".log" )
#else write on global file (swgit_tests.log)
shutil.rmtree( self.TAG_REPO_DIR, True )
shutil.rmtree( self.TAG_CLONE_DIR, True )
shutil.rmtree( self.TAG_CLONE_DIR_2, True )
self.swgitUtil_Repo_ = swgit__utils( self.TAG_REPO_DIR )
self.swgitUtil_Clone_ = swgit__utils( self.TAG_CLONE_DIR )
self.swgitUtil_Clone_2 = swgit__utils( self.TAG_CLONE_DIR_2 )
self.gitUtil_Repo_ = git__utils( self.TAG_REPO_DIR )
self.gitUtil_Clone_ = git__utils( self.TAG_CLONE_DIR )
self.CREATED_BR = "%s/%s/%s/FTR/%s" % ( TEST_REPO_R, TEST_REPO_S, TEST_USER, self.BRANCH_NAME )
self.CREATED_DEV_0 = "%s/DEV/000" % ( self.CREATED_BR )
self.CREATED_DEV_1 = "%s/DEV/001" % ( self.CREATED_BR )
self.CREATED_DEV_2 = "%s/DEV/002" % ( self.CREATED_BR )
self.DDTS_0 = "Issue00000"
self.DDTS_1 = "Issue11111"
self.DDTS_2 = "Issue22222"
self.CREATED_FIX_0 = "%s/FIX/%s" % ( self.CREATED_BR, self.DDTS_0 )
self.CREATED_FIX_1 = "%s/FIX/%s" % ( self.CREATED_BR, self.DDTS_1 )
self.CREATED_FIX_2 = "%s/FIX/%s" % ( self.CREATED_BR, self.DDTS_2 )
self.MODIFY_FILE = "%s/a.txt" % ( self.TAG_CLONE_DIR )
self.G2C_NAME = "export"
self.CREATED_G2C = "%s/G2C/%s" % ( TEST_REPO_BR_DEV, self.G2C_NAME )
self.SLC_LIV = "LIV.A.40"
self.PLAT_LIV = "PLAT.10"
self.FLC_LIV = "LIV.4.0.0.DROP.AH"
self.ZIC_LIV = "ZIC.10"
def tearDown( self ):
pass
def clone_createBr_modify( self, somecommmitondev = False ):
#first create repo
create_dir_some_file( self.TAG_REPO_DIR )
out, errCode = swgit__utils.init_dir( self.TAG_REPO_DIR )
self.assertEqual( errCode, 0, "SWGIT init FAILED - swgit__utils.init_dir - \n%s\n" % out )
if somecommmitondev:
#create also an empty commit on develop
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "d.", gotoint = False )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "d.", gotoint = False )
    #create a commit useful for some tests
out, errCode = self.swgitUtil_Repo_.branch_create_src( self.ORIG_MOD_BRANCH, ORIG_REPO_DEVEL_BRANCH )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "1. modify to create commit", gotoint = False )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "2. modify to create commit", gotoint = False )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.tag_create( "DEV", msg = "some modifications on origin" )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.branch_switch_to_br( ORIG_REPO_DEVEL_BRANCH )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.merge( self.ORIG_MOD_DEV )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
# clone
out, errCode = swgit__utils.clone_repo( self.TAG_REPO_DIR, self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_repo" )
# create branch
out, errCode = self.swgitUtil_Clone_.branch_create( self.BRANCH_NAME )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.branch_create FAILED - \n%s\n" % out )
# modify a file
out, errCode = echo_on_file( self.MODIFY_FILE )
self.assertEqual( errCode, 0, "_utils.echo_on_file FAILED - \n%s\n" % out )
def clone_createBr( self, somecommmitondev = False, integrator = False ):
#first create repo
create_dir_some_file( self.TAG_REPO_DIR )
out, errCode = swgit__utils.init_dir( self.TAG_REPO_DIR )
self.assertEqual( errCode, 0, "SWGIT init FAILED - swgit__utils.init_dir - \n%s\n" % out )
if somecommmitondev:
#create also an empty commit on develop
out, errCode = self.swgitUtil_Repo_.branch_switch_to_br( ORIG_REPO_DEVEL_BRANCH )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "d. da develop", gotoint = False )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "d. da develop", gotoint = False )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
    #create a commit useful for some tests
out, errCode = self.swgitUtil_Repo_.branch_create_src( self.ORIG_MOD_BRANCH, ORIG_REPO_DEVEL_BRANCH )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "1. modify to create commit", gotoint = False )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.modify_repo( TEST_REPO_FILE_A, msg = "2. modify to create commit", gotoint = False )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.tag_create( "DEV", msg = "some modifications on origin" )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.branch_switch_to_br( ORIG_REPO_DEVEL_BRANCH )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
out, errCode = self.swgitUtil_Repo_.merge( self.ORIG_MOD_DEV )
self.assertEqual( errCode, 0, "FAILED - \n%s\n" % out )
# clone
if not integrator:
out, errCode = swgit__utils.clone_repo( self.TAG_REPO_DIR, self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_repo" )
else:
out, errCode = swgit__utils.clone_repo_integrator( self.TAG_REPO_DIR, self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_repo" )
# create branch
out, errCode = self.swgitUtil_Clone_.branch_create( self.BRANCH_NAME )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.branch_create FAILED - \n%s\n" % out )
def modify_and_commit( self, alsotags = True ):
# modify a file
out, errCode = echo_on_file( self.MODIFY_FILE )
self.assertEqual( errCode, 0, "_utils.echo_on_file FAILED - \n%s\n" % out )
#create all custom tags
if alsotags:
self.swgitUtil_Clone_.tag_define_all_100_custom()
# getsha before commit
sha_before, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % sha_before )
# commit
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.commit_minusA FAILED - \n%s\n" % out )
# getsha after commit
sha_after, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % sha_after )
self.assertNotEqual( sha_before, sha_after, "swgitUtil_Clone_.commit_minusA FAILED - after commit SAME sha as before\n%s\n" % sha_after )
# getsha previous HEAD
sha_minusOne, errCode = self.gitUtil_Clone_.get_currsha( "HEAD~1" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % sha_minusOne )
self.assertEqual( sha_before, sha_minusOne, "swgitUtil_Clone_.commit_minusA FAILED - sha_minusOne not same as sha_before\n%s\n" % sha_minusOne )
def test_Tag_01_00_nothingDone( self ):
#first create repo
create_dir_some_file( self.TAG_REPO_DIR )
out, errCode = swgit__utils.init_dir( self.TAG_REPO_DIR )
self.assertEqual( errCode, 0, "SWGIT init FAILED - swgit__utils.init_dir - \n%s\n" % out )
# clone
out, errCode = swgit__utils.clone_repo( self.TAG_REPO_DIR, self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_repo" )
# create branch
out, errCode = self.swgitUtil_Clone_.branch_create( self.BRANCH_NAME )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.branch_create FAILED - \n%s\n" % out )
# getsha before commit
sha_before, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % out )
# tag nothing
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_dev MUST FAILED (tagging on branch just created without changes) - \n%s\n" % out )
# getsha after commit
sha_after, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % out )
self.assertEqual( sha_before, sha_after, "swgitUtil_Clone_.tag_dev FAILED - after commit no-op, not same sha as before\n%s\n" % out )
# tag nothing
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_fix MUST FAILED - \n%s\n" % out )
# getsha after commit
sha_after, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % out )
self.assertEqual( sha_before, sha_after, "swgitUtil_Clone_.tag_fix FAILED - after commit no-op, not same sha as before\n%s\n" % out )
def test_Tag_01_01_noAdd( self ):
self.clone_createBr_modify()
# getsha before commit
sha_before, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % sha_before )
# commit but not added
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_dev MUST FAILED (tagging dev on branch with changes not added to index)- \n%s\n" % out )
# getsha after commit
sha_after, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % sha_after )
self.assertEqual( sha_before, sha_after, "swgitUtil_Clone_.tag_dev FAILED - after commit no-op, not same sha as before\n%s\n" % sha_after )
# check TAG MUST NOT exists on HEAD
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) MUST FAILED - \n%s\n" % tag )
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) MUST FAILED - \n%s\n" % tag )
# commit but not added
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_fix MUST FAILED (tagging fix on branch with changes not added to index)- \n%s\n" % out )
# getsha after commit
sha_after, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % sha_after )
self.assertEqual( sha_before, sha_after, "swgitUtil_Clone_.tag_fix FAILED - after commit no-op, not same sha as before\n%s\n" % sha_after )
# check TAG MUST NOT exists on HEAD
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) MUST FAILED - \n%s\n" % tag )
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) MUST FAILED - \n%s\n" % tag )
def test_Tag_02_00_DEV( self ):
self.clone_createBr()
self.modify_and_commit()
commit_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit_sha )
# tag dev
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# check TAG exists on HEAD, FIX does not exist
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) FAILED - \n%s\n" % tag )
    self.assertEqual( tag, self.CREATED_DEV_0, "gitUtil_Clone_.tag_dev FAILED to put DEV label - \n%s\n" % self.CREATED_DEV_0 )
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) MUST FAILED - \n%s\n" % tag )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
    self.assertEqual( commit_sha, tag0_sha, "swgitUtil_Clone_.tag_dev FAILED (commit sha different from *tag sha) - \n%s\n" % tag0_sha )
#
# Another loop
#
self.modify_and_commit()
commit1_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit1_sha )
self.assertNotEqual( commit_sha, commit1_sha, "self.modify_and_commit FAILED (not different commits) - \n%s\n" % commit1_sha )
# tag dev
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# check TAG exists on HEAD
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) FAILED - \n%s\n" % tag )
    self.assertEqual( tag, self.CREATED_DEV_1, "gitUtil_Clone_.tag_dev FAILED to put DEV label - \n%s\n" % self.CREATED_DEV_1 )
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) MUST FAILED - \n%s\n" % tag )
tag1_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_1 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag1_sha )
    self.assertEqual( commit1_sha, tag1_sha, "swgitUtil_Clone_.tag_dev FAILED (commit sha different from *tag sha) - \n%s\n" % tag1_sha )
#
# Another loop
#
self.modify_and_commit()
commit2_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit2_sha )
self.assertNotEqual( commit_sha, commit1_sha, "self.modify_and_commit FAILED (not different commits) - \n%s\n" % commit1_sha )
self.assertNotEqual( commit_sha, commit2_sha, "self.modify_and_commit FAILED (not different commits) - \n%s\n" % commit2_sha )
self.assertNotEqual( commit1_sha, commit2_sha, "self.modify_and_commit FAILED (not different commits) - \n%s\n" % commit2_sha )
# tag dev
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# check TAG exists on HEAD
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) FAILED - \n%s\n" % tag )
    self.assertEqual( tag, self.CREATED_DEV_2, "gitUtil_Clone_.tag_dev FAILED to put DEV label - \n%s\n" % self.CREATED_DEV_2 )
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) MUST FAILED - \n%s\n" % tag )
tag2_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_2 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag2_sha )
    self.assertEqual( commit2_sha, tag2_sha, "swgitUtil_Clone_.tag_dev FAILED (commit sha different from *tag sha) - \n%s\n" % tag2_sha )
def test_Tag_02_01_DEV_on_DEV( self ):
self.clone_createBr()
self.modify_and_commit()
# tag dev
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# re-tag dev
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_dev MUST FAILED (Already a DEV on this commit)- \n%s\n" % out )
def test_Tag_03_00_FIX_WrongsParams( self ):
self.clone_createBr()
self.modify_and_commit()
out, errCode = self.swgitUtil_Clone_.tag_fix( "\"\"" )
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_dev MUST FAILED (no ddts passed) - \n%s\n" % out )
out, errCode = self.swgitUtil_Clone_.tag_fix( "ISSUE12345" )
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_dev MUST FAILED (no regexp satisfied) - \n%s\n" % out )
def test_Tag_03_01_FIX( self ):
self.clone_createBr()
self.modify_and_commit()
# tag fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# check TAG exists on HEAD, DEV does not exist
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) FAILED - \n%s\n" % tag )
    self.assertEqual( tag, self.CREATED_FIX_0, "gitUtil_Clone_.tag_fix FAILED to put FIX label - \n%s\n" % self.CREATED_FIX_0 )
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) MUST FAILED - \n%s\n" % tag )
#
# Another loop
#
self.modify_and_commit()
# tag fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_1 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# check TAG exists on HEAD, DEV does not exist
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) FAILED - \n%s\n" % tag )
    self.assertEqual( tag, self.CREATED_FIX_1, "gitUtil_Clone_.tag_fix FAILED to put FIX label - \n%s\n" % self.CREATED_FIX_1 )
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) MUST FAILED - \n%s\n" % tag )
#
# Another loop
#
self.modify_and_commit()
# tag fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_2 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
# check TAG exists on HEAD, DEV does not exist
tag, errCode = self.gitUtil_Clone_.tag_get( "FIX" )
self.assertEqual( errCode, 0, "gitUtil_Clone_.tag_get( FIX ) FAILED - \n%s\n" % tag )
    self.assertEqual( tag, self.CREATED_FIX_2, "gitUtil_Clone_.tag_fix FAILED to put FIX label - \n%s\n" % self.CREATED_FIX_2 )
tag, errCode = self.gitUtil_Clone_.tag_get( "DEV" )
self.assertNotEqual( errCode, 0, "gitUtil_Clone_.tag_get( DEV ) MUST FAILED - \n%s\n" % tag )
def test_Tag_03_02_FIX_on_same_FIX( self ):
self.clone_createBr()
self.modify_and_commit()
commit_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit_sha )
# tag fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag sha - \n%s\n" % tag0_sha )
# re-tag same fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertNotEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag sha - \n%s\n" % tag0_sha )
def test_Tag_03_03_FIX_on_other_FIX( self ):
self.clone_createBr()
self.modify_and_commit()
commit_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit_sha )
# tag fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag sha - \n%s\n" % tag0_sha )
# re-tag different fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_1 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag0 sha - \n%s\n" % tag0_sha )
tag1_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_1 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag1_sha )
self.assertEqual( commit_sha, tag1_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag1 sha - \n%s\n" % tag1_sha )
self.assertEqual( tag0_sha, tag1_sha, "swgitUtil_Clone_.tag_fix FAILED (*tag0 sha different from *tag1 sha - \n%s\n" % tag1_sha )
# re-tag different fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_2 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag0 sha - \n%s\n" % tag0_sha )
tag1_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_1 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag1_sha )
self.assertEqual( commit_sha, tag1_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag1 sha - \n%s\n" % tag1_sha )
self.assertEqual( tag0_sha, tag1_sha, "swgitUtil_Clone_.tag_fix FAILED (*tag0 sha different from *tag1 sha - \n%s\n" % tag1_sha )
tag2_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_2 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag2_sha )
self.assertEqual( commit_sha, tag2_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag2 sha - \n%s\n" % tag2_sha )
self.assertEqual( tag0_sha, tag1_sha, "swgitUtil_Clone_.tag_fix FAILED (*tag0 sha different from *tag1 sha - \n%s\n" % tag1_sha )
self.assertEqual( tag0_sha, tag2_sha, "swgitUtil_Clone_.tag_fix FAILED (*tag0 sha different from *tag2 sha - \n%s\n" % tag2_sha )
self.assertEqual( tag1_sha, tag2_sha, "swgitUtil_Clone_.tag_fix FAILED (*tag1 sha different from *tag2 sha - \n%s\n" % tag2_sha )
def test_Tag_03_04_LIV_cloneIntegrator( self ):
self.clone_createBr( integrator = True )
self.modify_and_commit()
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( ORIG_REPO_STABLE_BRANCH )
self.util_check_SUCC_scenario( out, errCode, "", "swicth to stable" )
out, errCode = echo_on_file( self.MODIFY_FILE )
self.util_check_SUCC_scenario( out, errCode, "", "modif file" )
out, errCode = self.swgitUtil_Clone_.commit_minusA( )
self.util_check_SUCC_scenario( out, errCode, "", "commit minua A on stable" )
out, errCode = self.swgitUtil_Clone_.tag_create( "LIV", "Drop.B", msg = "Droppppp" )
self.util_check_SUCC_scenario( out, errCode, "",
"LIV inside integration repo" )
def test_Tag_03_05_LIV_RepoConvertedIntoIntegrator( self ):
self.clone_createBr()
self.modify_and_commit()
out, errCode = self.swgitUtil_Clone_.tag_create( "LIV", "Drop.B", msg = "Droppppp" )
self.util_check_DENY_scenario( out, errCode,
"Tag LIV can be created/deleted only on",
"LIV inside NOT integration repo" )
#transform repo into integrator
self.swgitUtil_Clone_.set_cfg( "swgit.integrator", "True" )
    # some checks only for this particular repo...
out, errCode = self.swgitUtil_Clone_.tag_create( "LIV", "Drop.B", msg = "Droppppp" )
self.util_check_DENY_scenario( out, errCode,
"Label LIV cannot be put on branch type FTR",
"LIV inside NOT integration repo" )
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( ORIG_REPO_STABLE_BRANCH )
self.util_check_SUCC_scenario( out, errCode, "", "swicth to stable" )
out, errCode = self.swgitUtil_Clone_.tag_create( "LIV", "Drop.B", msg = "Droppppp" )
self.util_check_DENY_scenario( out, errCode,
#"You already have a LIV label on this commit:",
"You must have a new commit to tag",
"LIV inside NOT integration repo" )
#create commit on stable
out, errCode = echo_on_file( self.MODIFY_FILE )
self.util_check_SUCC_scenario( out, errCode, "", "modif file" )
out, errCode = self.swgitUtil_Clone_.commit_minusA( )
self.util_check_SUCC_scenario( out, errCode, "", "commit minua A on stable" )
out, errCode = self.swgitUtil_Clone_.tag_create( "LIV", "Drop.B", msg = "Droppppp" )
self.util_check_SUCC_scenario( out, errCode, "", "LIV inside integration repo" )
def test_Tag_04_00_Replace_DEV( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
# tag dev
out, errCode = self.swgitUtil_Clone_.tag_dev()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit0_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag sha - \n%s\n" % tag0_sha )
# modify, commit
self.modify_and_commit()
commitrep_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commitrep_sha )
# move tag
out, errCode = self.swgitUtil_Clone_.tag_dev_replace()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev_replace FAILED - \n%s\n" % out )
tagrep_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tagrep_sha )
self.assertEqual( commit0_sha, tag0_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commit sha different from *tag0 sha - \n%s\n" % tag0_sha )
self.assertEqual( commitrep_sha, tagrep_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha different from *tagrep sha - \n%s\n" % tagrep_sha )
self.assertNotEqual( commitrep_sha, commit0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha MUST be different from commit0_sha sha - \n%s\n" % commitrep_sha )
self.assertNotEqual( tagrep_sha, tag0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (*tagrep_sha sha MUST be different from *tag0_sha sha - \n%s\n" % tagrep_sha )
#
# Another loop
#
self.modify_and_commit()
commitrep2_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commitrep2_sha )
# move tag
out, errCode = self.swgitUtil_Clone_.tag_dev_replace()
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_dev_replace FAILED - \n%s\n" % out )
tagrep2_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tagrep2_sha )
self.assertEqual( commit0_sha, tag0_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commit sha different from *tag0 sha - \n%s\n" % tag0_sha )
self.assertEqual( commitrep_sha, tagrep_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha different from *tagrep sha - \n%s\n" % tagrep_sha )
self.assertNotEqual( commitrep_sha, commit0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha MUST be different from commit0_sha sha - \n%s\n" % commitrep_sha )
self.assertNotEqual( tagrep_sha, tag0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (*tagrep_sha sha MUST be different from *tag0_sha sha - \n%s\n" % tagrep_sha )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tagrep2_sha )
self.assertEqual( commit0_sha, tag0_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commit0 sha different from *tag0 sha - \n%s\n" % tag0_sha )
self.assertEqual( commitrep_sha, tagrep_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commit1 sha different from *tagrep sha - \n%s\n" % tagrep_sha )
self.assertEqual( commitrep2_sha, tagrep2_sha, "swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha different from *tagrep sha - \n%s\n" % tagrep2_sha )
self.assertNotEqual( commitrep2_sha, commit0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha MUST be different from commit0_sha sha - \n%s\n" % commitrep2_sha )
self.assertNotEqual( commitrep2_sha, commitrep_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (commitrep2 sha MUST be different from commitrep sha - \n%s\n" % commitrep2_sha )
self.assertNotEqual( commitrep_sha, commit0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (commitrep sha MUST be different from commit0 sha - \n%s\n" % commitrep2_sha )
self.assertNotEqual( tagrep2_sha, tag0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (*tagrep2_sha sha MUST be different from *tag0_sha sha - \n%s\n" % tagrep2_sha )
self.assertNotEqual( tagrep2_sha, tagrep_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (*tagrep2_sha sha MUST be different from *tagrep sha - \n%s\n" % tagrep2_sha )
self.assertNotEqual( tagrep_sha, tag0_sha, \
"swgitUtil_Clone_.tag_dev_replace FAILED (*tagrep_sha sha MUST be different from *tag0_sha sha - \n%s\n" % tagrep2_sha )
def test_Tag_04_01_Replace_FIX( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
# tag fix
out, errCode = self.swgitUtil_Clone_.tag_fix( self.DDTS_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_fix FAILED - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tag0_sha )
self.assertEqual( commit0_sha, tag0_sha, "swgitUtil_Clone_.tag_fix FAILED (commit sha different from *tag sha - \n%s\n" % tag0_sha )
# modify, commit
self.modify_and_commit()
commitrep_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commitrep_sha )
# move tag
out, errCode = self.swgitUtil_Clone_.tag_fix_replace( self.DDTS_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.tag_fix_replace FAILED - \n%s\n" % out )
tagrep_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_FIX_0 )
self.assertEqual( errCode, 0, "swgitUtil_Clone_.ref2sha FAILED - \n%s\n" % tagrep_sha )
self.assertEqual( commit0_sha, tag0_sha, "swgitUtil_Clone_.tag_fix_replace FAILED (commit sha different from *tag0 sha - \n%s\n" % tag0_sha )
self.assertEqual( commitrep_sha, tagrep_sha, "swgitUtil_Clone_.tag_fix_replace FAILED (commitrep sha different from *tagrep sha - \n%s\n" % tagrep_sha )
self.assertNotEqual( commitrep_sha, commit0_sha, \
"swgitUtil_Clone_.tag_fix_replace FAILED (commitrep sha MUST be different from commit0_sha sha - \n%s\n" % commitrep_sha )
self.assertNotEqual( tagrep_sha, tag0_sha, \
"swgitUtil_Clone_.tag_fix_replace FAILED (*tagrep_sha sha MUST be different from *tag0_sha sha - \n%s\n" % tagrep_sha )
def test_Tag_05_00_DEV_on_INT( self ):
out, errCode = swgit__utils.clone_scripts_repo( self.TAG_CLONE_DIR )
out, errCode = echo_on_file( self.TAG_CLONE_DIR + ORIG_REPO_aFILE )
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_DENY_scenario( out, errCode,
"Cannot issue this command on integration branches except for",
"MUST FAIL commit" )
    # simulate a merge conflict
out, errCode = self.swgitUtil_Clone_.system_unix( "touch .git/MERGE_HEAD" )
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_SUCC_scenario( out, errCode,
"",
"FAILED commit" )
#tag create
out, errCode = self.swgitUtil_Clone_.tag_create( "DEV", msg = "some modifications on origin" )
self.util_check_DENY_scenario( out, errCode,
"Label DEV cannot be put on branch type INT",
"FAILED commit" )
def test_Tag_05_01_DEV_on_FTR_asINTbr( self ):
self.clone_createBr_modify()
out, errCode = self.swgitUtil_Clone_.int_branch_set( self.CREATED_BR )
self.util_check_SUCC_scenario( out, errCode, "", "FAILED set int br %s" % self.CREATED_BR )
# commit must fail except for resolve conflict
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_DENY_scenario( out, errCode,
"Cannot issue this command on integration branches except for",
"MUST FAIL commit" )
    # simulate a merge conflict; commit must succeed
out, errCode = self.swgitUtil_Clone_.system_unix( "touch .git/MERGE_HEAD" )
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_SUCC_scenario( out, errCode, "", "FAILED commit" )
#tag create, allow it on FTR.
out, errCode = self.swgitUtil_Clone_.tag_create( "DEV", msg = "some modifications on origin" )
self.util_check_SUCC_scenario( out, errCode, "", "FAILED tagging DEV on FTR" )
def test_Tag_06_00_CustomTags_SimpleTag( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
out, errCode = self.swgitUtil_Clone_.tag_create( "NOTEXISTS", msg = "something" )
self.assertEqual( errCode, 1, "MUST FAIL creation of not existing label - \n%s\n" % out )
#define custom tags
self.swgitUtil_Clone_.tag_define_custom_tag( CUSTTAG_NUM )
self.swgitUtil_Clone_.tag_define_custom_tag( CUSTTAG_NAME )
    # verify you can always put a DEV (default labels must never disappear)
    out, errCode = self.swgitUtil_Clone_.tag_create( "dev", msg = "creating tag dev" )
self.assertEqual( errCode, 0, "FAILED tag DEV - \n%s\n" % out )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_DEV_0 )
self.assertEqual( errCode, 0, "FAILED retrieving DEV - \n%s\n" % tag0_sha )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
#simple numtag, with val without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], "avalue" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM plus value - \n%s\n" % out )
#simple numtag, with val and with msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], "avalue", msg = "something" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM plus value - \n%s\n" % out )
#simple numtag, without val and without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], "avalue" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM without msg and hook - \n%s\n" % out )
#simple numtag correct
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], msg = "something" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NUM creation - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, CUSTTAG_NUM["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
#simple nametag, without val without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME without value - \n%s\n" % out )
#simple nametag, without val with msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], msg = "someother" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME without value - \n%s\n" % out )
#simple nametag, with val without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME without message - \n%s\n" % out )
#simple nametag correct
value = "DropAB_2"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], value, msg = "someother first regexp" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NAME respecting FIRST regexp - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha,
"FAILED creating tag CUSTTAG_NAME with first regexp commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
value = "Issue12345"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], value , msg = "someother second regexp" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NAME respecting SECOND regexp - \n%s\n" % out )
created_custtag_label_2 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_2 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_2, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha,
"FAILED creating tag CUSTTAG_NAME with second regexp commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
def test_Tag_06_01_CustomTags_SimpleTag_WrongValues( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#define custom tags
wrong_num_custtag_push = copy.deepcopy( CUSTTAG_NUM )
wrong_name_custtag_push = copy.deepcopy( CUSTTAG_NAME )
wrong_num_custtag_push["push_on_origin"] = "CICCIO"
wrong_name_custtag_push["push_on_origin"] = "CICCIO"
self.swgitUtil_Clone_.tag_define_custom_tag( wrong_num_custtag_push )
self.swgitUtil_Clone_.tag_define_custom_tag( wrong_name_custtag_push )
#must fail creation
out, errCode = self.swgitUtil_Clone_.tag_create( wrong_num_custtag_push["tagtype"], msg = "something" )
self.assertEqual( errCode, 1, "MUST FAIL WRONG val \"push\" CUSTTAG NUM creation - \n%s\n" % out )
    # check the label does not exist
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, wrong_num_custtag_push["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST NOT EXISTS tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#must fail creation
value = "DropAB_2"
out, errCode = self.swgitUtil_Clone_.tag_create( wrong_name_custtag_push["tagtype"], value, msg = "someother first regexp" )
self.assertEqual( errCode, 1, "MUST FAIL WRONG val \"push\" CUSTTAG NAME creation - \n%s\n" % out )
    # check the label does not exist
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, wrong_name_custtag_push["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
def test_Tag_06_02_CustomTags_Hook_PreTag_Local( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#
# local script
#
#define custom tags
pretag_custtag_ECHO_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_NOECHO_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_ECHO_BUTFAILS_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_NOECHO_ANDFAILS_num = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
out_ssh_string = "Executing pre-tag hook"
pretag_custtag_ECHO_num["tagtype"] = "NUMTAG_PREHOOK_ECHO"
pretag_custtag_ECHO_num["hook_pretag_script"] = "echo \"%s\"" % script_comment
pretag_custtag_NOECHO_num["tagtype"] = "NAMETAG_PREHOOK_NOECHO"
pretag_custtag_NOECHO_num["hook_pretag_script"] = "echo \"AAA\" > /dev/null"
pretag_custtag_ECHO_BUTFAILS_num["tagtype"] = "NUMTAG_PREHOOK_ECHO_BUTFAILS"
pretag_custtag_ECHO_BUTFAILS_num["hook_pretag_script"] = "echo \"%s\" && return 1" % script_comment
pretag_custtag_NOECHO_ANDFAILS_num["tagtype"] = "NAMETAG_PREHOOK_NOECHO_AND_FAILS"
pretag_custtag_NOECHO_ANDFAILS_num["hook_pretag_script"] = "echo \"AAA\" > /dev/null && return 1"
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_ECHO_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_NOECHO_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_ECHO_BUTFAILS_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_NOECHO_ANDFAILS_num )
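    # Four pre-tag hook flavours are exercised below: ECHO prints a comment
    # (reused as the tag message), NOECHO prints nothing (so tagging without
    # -m must fail), and the *FAILS variants return non-zero (so tag creation
    # must abort).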
#pretag ECHO numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHO_num["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
out_ssh_string,
"tagging, local echo script" )
self.util_check_SUCC_scenario( out, errCode,
script_comment,
"tagging, local echo script" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHO_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag comment for %s - \n%s\n" % (created_custtag_label, tag_comment) )
    self.assertTrue( script_comment in tag_comment, "FAILED creating tag: comment without %s inside %s" % ( script_comment, tag_comment) )
#pretag ECHO numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHO_num["tagtype"], msg = user_comment )
self.util_check_SUCC_scenario( out, errCode,
out_ssh_string,
"tagging, echo script with message" )
self.util_check_SUCC_scenario( out, errCode,
script_comment,
"tagging, echo script with message" )
created_custtag_label_1 = "%s/%s/001" % ( self.CREATED_BR, pretag_custtag_ECHO_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag comment for %s - \n%s\n" % (created_custtag_label_1, tag_comment) )
self.assertTrue( script_comment in tag_comment, "FAILED creating tag: comment without hook output %s inside %s" % ( script_comment, tag_comment) )
self.assertTrue( user_comment in tag_comment, "FAILED creating tag: comment without user output %s inside %s" % ( user_comment, tag_comment) )
#pretag NOECHO numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHO_num["tagtype"] )
self.assertTrue( out_ssh_string in out, "FAILED tagging, local noecho script" )
self.util_check_DENY_scenario( out, errCode,
"returned empty string. Please specify at least -m option",
"tagging, noecho script" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHO_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag NOECHO numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHO_num["tagtype"], msg = user_comment )
self.util_check_SUCC_scenario( out, errCode,
out_ssh_string,
"tagging, local noecho script with message" )
self.assertTrue( script_comment not in out, "FAILED tagging, noecho script" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHO_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag comment for %s - \n%s\n" % (created_custtag_label_1, tag_comment) )
self.assertTrue( script_comment not in tag_comment, "FAILED creating tag: hook output %s must not be inside comment %s" % ( script_comment, tag_comment) )
self.assertTrue( user_comment in tag_comment, "FAILED creating tag: comment without user output %s inside %s" % ( user_comment, tag_comment) )
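# a failing pre-tag hook must always abort tag creation, with or without -m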
#pretag ECHO + FAILS numtag, without msg
# do not consider script output
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHO_BUTFAILS_num["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"returned error.",
"tagging with local pre-tag echo nomsg returning error" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHO_BUTFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag ECHO + FAILS numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHO_BUTFAILS_num["tagtype"], msg = user_comment )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NUMTAG_PREHOOK_ECHO_BUTFAILS pre-tag hook",
"tagging with local pre-tag numbered returning error" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHO_BUTFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
self.util_check_DENY_scenario( out, errCode, "", "retrieving failed tag" )
#pretag NOECHO + FAILS numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHO_ANDFAILS_num["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"returned error.",
"tagging with local pre-tag numtag noecho returning error" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHO_ANDFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag NOECHO + FAILS numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHO_ANDFAILS_num["tagtype"], msg = user_comment )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NAMETAG_PREHOOK_NOECHO_AND_FAILS pre-tag hook",
"tagging with local pre-tag named returning error" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHO_ANDFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving failed tag" )
def test_Tag_06_03_CustomTags_Hook_PreTag_OverSsh( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#
# remote scripts
#
#define custom tags
pretag_custtag_ECHOSSH_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_NOECHOSSH_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_ECHOSSH_BUTFAILS_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_NOECHOSSH_ANDFAILS_num = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
out_ssh_string = "Executing pre-tag hook %s@%s" % (TEST_USER_SSH,TEST_ADDR)
pretag_custtag_ECHOSSH_num["tagtype"] = "NUMTAG_PREHOOK_ECHOSSH"
pretag_custtag_ECHOSSH_num["hook_pretag_script"] = "echo \"%s\"" % script_comment
pretag_custtag_ECHOSSH_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_ECHOSSH_num["hook_pretag_sshaddr"] = TEST_ADDR
pretag_custtag_NOECHOSSH_num["tagtype"] = "NAMETAG_PREHOOK_NOECHOSSH"
pretag_custtag_NOECHOSSH_num["hook_pretag_script"] = "echo \"AAA\" > /dev/null"
pretag_custtag_NOECHOSSH_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_NOECHOSSH_num["hook_pretag_sshaddr"] = TEST_ADDR
pretag_custtag_ECHOSSH_BUTFAILS_num["tagtype"] = "NUMTAG_PREHOOK_ECHOSSH_BUTFAILS"
pretag_custtag_ECHOSSH_BUTFAILS_num["hook_pretag_script"] = "echo \"%s\" && return 1" % script_comment
pretag_custtag_ECHOSSH_BUTFAILS_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_ECHOSSH_BUTFAILS_num["hook_pretag_sshaddr"] = TEST_ADDR
pretag_custtag_NOECHOSSH_ANDFAILS_num["tagtype"] = "NAMETAG_PREHOOK_NOECHOSSH_AND_FAILS"
pretag_custtag_NOECHOSSH_ANDFAILS_num["hook_pretag_script"] = "echo \"AAA\" > /dev/null && return 1"
pretag_custtag_NOECHOSSH_ANDFAILS_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_NOECHOSSH_ANDFAILS_num["hook_pretag_sshaddr"] = TEST_ADDR
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_ECHOSSH_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_NOECHOSSH_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_ECHOSSH_BUTFAILS_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_NOECHOSSH_ANDFAILS_num )
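# same matrix as the local test, but every hook runs over ssh as
# TEST_USER_SSH@TEST_ADDR, so the hook banner also reports user@addr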
#pretag ECHOSSH numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_num["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
out_ssh_string,
"tagging over ssh, echo script" )
self.util_check_SUCC_scenario( out, errCode,
script_comment,
"tagging over ssh, echo script" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag comment for %s - \n%s\n" % (created_custtag_label, tag_comment) )
self.assertTrue( script_comment in tag_comment, "FAILED creating tag: comment without %s inside %s" % ( script_comment, tag_comment) )
#pretag ECHOSSH numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_num["tagtype"], msg = user_comment )
self.util_check_SUCC_scenario( out, errCode,
out_ssh_string,
"tagging over ssh, echo script with message" )
self.util_check_SUCC_scenario( out, errCode,
script_comment,
"tagging over ssh, echo script with message" )
created_custtag_label_1 = "%s/%s/001" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag comment for %s - \n%s\n" % (created_custtag_label_1, tag_comment) )
self.assertTrue( script_comment in tag_comment, "FAILED creating tag: comment without hook output %s inside %s" % ( script_comment, tag_comment) )
self.assertTrue( user_comment in tag_comment, "FAILED creating tag: comment without user output %s inside %s" % ( user_comment, tag_comment) )
#pretag NOECHOSSH numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_num["tagtype"] )
self.assertTrue( out_ssh_string in out, "FAILED tagging over ssh, noecho script" )
self.util_check_DENY_scenario( out, errCode,
"returned empty string. Please specify at least -m option",
"tagging over ssh, noecho script" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag NOECHOSSH numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_num["tagtype"], msg = user_comment )
self.util_check_SUCC_scenario( out, errCode,
out_ssh_string,
"tagging over ssh, noecho script with message" )
self.assertTrue( script_comment not in out, "FAILED tagging over ssh, noecho script with message" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha, "FAILED creating tag: commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag comment for %s - \n%s\n" % (created_custtag_label_1, tag_comment) )
self.assertTrue( script_comment not in tag_comment, "FAILED creating tag: hook output %s must not be inside comment %s" % ( script_comment, tag_comment) )
self.assertTrue( user_comment in tag_comment, "FAILED creating tag: comment without user output %s inside %s" % ( user_comment, tag_comment) )
#pretag ECHOSSH + FAILS numtag, without msg
# do not consider script output
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_BUTFAILS_num["tagtype"] )
self.assertTrue( out_ssh_string in out, "FAILED tagging over ssh, echo script, but fails" )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NUMTAG_PREHOOK_ECHOSSH_BUTFAILS pre-tag hook",
"tagging over ssh, echo script, but fails" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_BUTFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag ECHOSSH + FAILS numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_BUTFAILS_num["tagtype"], msg = user_comment )
self.assertTrue( out_ssh_string in out, "FAILED tagging over ssh, echo script with message, but fails" )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NUMTAG_PREHOOK_ECHOSSH_BUTFAILS pre-tag hook",
"tagging over ssh, echo script with message, but fails" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_BUTFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NUMTAG_PREHOOK_ECHOSSH_BUTFAILS pre-tag hook",
"tagging with ssh pre-tag numbered returning error" )
#pretag NOECHOSSH + FAILS numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_ANDFAILS_num["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM without msg and with noecho hook - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_ANDFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag NOECHOSSH + FAILS numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_ANDFAILS_num["tagtype"], msg = user_comment )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM with msg and with noecho hook - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_ANDFAILS_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NAMETAG_PREHOOK_NOECHOSSH_AND_FAILS pre-tag hook",
"tagging with ssh pre-tag numbered returning error" )
def test_Tag_06_04_CustomTags_Hook_PreTag_OverSsh_WrongVals( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#
# remote wrong scripts
#
#
# ssh custom tag WITHOUT IP
# must always fail
#
pretag_custtag_ECHOSSH_noip_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_NOECHOSSH_noip_num = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
pretag_custtag_ECHOSSH_noip_num["tagtype"] = "NUMTAG_PREHOOK_ECHOSSH_NOIP"
pretag_custtag_ECHOSSH_noip_num["hook_pretag_script"] = "echo \"%s\"" % script_comment
pretag_custtag_ECHOSSH_noip_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_ECHOSSH_noip_num["hook_pretag_sshaddr"] = ""
pretag_custtag_NOECHOSSH_noip_num["tagtype"] = "NAMETAG_PREHOOK_NOECHOSSH_NOIP"
pretag_custtag_NOECHOSSH_noip_num["hook_pretag_script"] = "echo \"AAA\" > /dev/null"
pretag_custtag_NOECHOSSH_noip_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_NOECHOSSH_noip_num["hook_pretag_sshaddr"] = ""
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_ECHOSSH_noip_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_NOECHOSSH_noip_num )
#pretag ECHOSSH noip numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_noip_num["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM without msg but with echo hook and no ip - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_noip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag ECHOSSH noip numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_noip_num["tagtype"], msg = user_comment )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM with msg and with echo hook and no ip - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_noip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
#pretag NOECHOSSH noip numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_noip_num["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM without msg and with noecho hook and no ip - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_noip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag NOECHOSSH noip numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_noip_num["tagtype"], msg = user_comment )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM with msg and with noecho hook and no ip - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_noip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
#
# ssh custom tag with WRONG IP
# must always fail: the pre-tag hook cannot run, with or without -m
#
pretag_custtag_ECHOSSH_wrongip_num = copy.deepcopy( CUSTTAG_NUM )
pretag_custtag_NOECHOSSH_wrongip_num = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
pretag_custtag_ECHOSSH_wrongip_num["tagtype"] = "NUMTAG_PREHOOK_ECHOSSH_WRONGIP"
pretag_custtag_ECHOSSH_wrongip_num["hook_pretag_script"] = "echo \"%s\"" % script_comment
pretag_custtag_ECHOSSH_wrongip_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_ECHOSSH_wrongip_num["hook_pretag_sshaddr"] = "127" #invalid IP
pretag_custtag_NOECHOSSH_wrongip_num["tagtype"] = "NAMETAG_PREHOOK_NOECHOSSH_WRONGIP"
pretag_custtag_NOECHOSSH_wrongip_num["hook_pretag_script"] = "echo \"AAA\" > /dev/null"
pretag_custtag_NOECHOSSH_wrongip_num["hook_pretag_sshuser"] = TEST_USER_SSH
pretag_custtag_NOECHOSSH_wrongip_num["hook_pretag_sshaddr"] = "127"
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_ECHOSSH_wrongip_num )
self.swgitUtil_Clone_.tag_define_custom_tag( pretag_custtag_NOECHOSSH_wrongip_num )
#pretag ECHOSSH wrongip numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_wrongip_num["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM without msg but with echo hook and wrong ip - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_wrongip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag ECHOSSH wrongip numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_ECHOSSH_wrongip_num["tagtype"], msg = user_comment )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NUMTAG_PREHOOK_ECHOSSH_WRONGIP pre-tag hook",
"tagging pretag_custtag_ECHOSSH_wrongip_num, with message" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_ECHOSSH_wrongip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
#pretag NOECHOSSH wrongip numtag, without msg
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_wrongip_num["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NAMETAG_PREHOOK_NOECHOSSH_WRONGIP pre-tag",
"tagging pretag_custtag_NOECHOSSH_wrongip_num, no message" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_wrongip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#pretag NOECHOSSH wrongip numtag, with msg
user_comment = "MESSAGE IN A BOTTLE"
out, errCode = self.swgitUtil_Clone_.tag_create( pretag_custtag_NOECHOSSH_wrongip_num["tagtype"], msg = user_comment )
self.util_check_DENY_scenario( out, errCode,
"FAILED - NAMETAG_PREHOOK_NOECHOSSH_WRONGIP pre-tag hook",
"tagging pretag_custtag_NOECHOSSH_wrongip_num, with message" )
created_custtag_label_1 = "%s/%s/000" % ( self.CREATED_BR, pretag_custtag_NOECHOSSH_wrongip_num["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
def test_Tag_07_00_CustomTags_Overload_list_regexp( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#define custom tags
self.swgitUtil_Clone_.tag_define_custom_tag( CUSTTAG_NUM )
self.swgitUtil_Clone_.tag_define_custom_tag( CUSTTAG_NAME )
basekey_num = "swgit.%s." % CUSTTAG_NUM["tagtype"]
basekey_name = "swgit.%s." % CUSTTAG_NAME["tagtype"]
#change regexp
self.swgitUtil_Clone_.set_cfg( basekey_num + "regexp", "^[a-z]{3}$" )
self.swgitUtil_Clone_.set_cfg( basekey_name + "regexp", "^[a-z]{3}$" )
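# from here on both tag types only accept a value matching ^[a-z]{3}$
# (e.g. "abc"); values legal under the original regexps must now fail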
#
# NUM NOW BECOMES A NAMED TAG!!!
#
#simple numtag, with val without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], "avalue" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM now NAME plus value (wrong regexp) - \n%s\n" % out )
#simple numtag, with val and with msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], "avalue", msg = "something" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM now NAME plus value (wrong regexp) - \n%s\n" % out )
#simple numtag, without val and without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM now NAME without msg and without name (wrong regexp) - \n%s\n" % out )
#simple numtag NOW FAILS TOO
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], msg = "something" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM now NAME creation without name - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, CUSTTAG_NUM["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#OLD NUM now NAME can be created with msg and right name
value = "abc"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM["tagtype"], value, msg = "old num now named tag" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NUM now NAME respecting regexp - \n%s\n" % out )
created_custtag_label_0 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NUM["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_0 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_0, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha,
"FAILED creating tag CUSTTAG_NAME with first regexp commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
#
#NAME CHANGED
#
#simple nametag, without val without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"] )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME without value - \n%s\n" % out )
#simple nametag, without val with msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], msg = "someother" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME without value - \n%s\n" % out )
#simple nametag, with val without msg
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], "abc" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME without message - \n%s\n" % out )
#simple nametag correct AND ALL OLD REGEXP MUST FAIL
value = "abc"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], value, msg = "someother first regexp" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NAME respecting FIRST regexp - \n%s\n" % out )
created_custtag_label_0 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_0 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_0, tag0_sha) )
self.assertEqual( commit0_sha, tag0_sha,
"FAILED creating tag CUSTTAG_NAME with first regexp commit sha (%s) different from tag sha (%s)" % ( commit0_sha, tag0_sha) )
value = "DropAB_2"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], value, msg = "someother first regexp" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME respecting FIRST regexp, now overloaded with config - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
value = "Issue12345"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME["tagtype"], value , msg = "someother second regexp" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME respecting SECOND regexp, now overloaded with config - \n%s\n" % out )
created_custtag_label_2 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_2 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_2, tag0_sha) )
def test_Tag_07_01_CustomTags_Overload_bool_oneXcommit( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#
# FIRST CREATE LABELS WITH ONE_X_COMMIT = True
#
CUSTTAG_NUM_ONEXCOMMIT = copy.deepcopy( CUSTTAG_NUM )
CUSTTAG_NAME_ONEXCOMMIT = copy.deepcopy( CUSTTAG_NAME )
CUSTTAG_NUM_ONEXCOMMIT["one_x_commit"] = "True"
CUSTTAG_NAME_ONEXCOMMIT["one_x_commit"] = "True"
self.swgitUtil_Clone_.tag_define_custom_tag( CUSTTAG_NUM_ONEXCOMMIT )
self.swgitUtil_Clone_.tag_define_custom_tag( CUSTTAG_NAME_ONEXCOMMIT )
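# one_x_commit = True allows at most one tag of each type per commit:
# the first creation must succeed, a second one on the same commit must fail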
#
# create 2 labels, must fail second
#
#NUM0
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM_ONEXCOMMIT["tagtype"], msg = "something num" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NUM - \n%s\n" % out )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, CUSTTAG_NUM_ONEXCOMMIT["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#NUM1
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM_ONEXCOMMIT["tagtype"], msg = "something num second" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM on same commit - \n%s\n" % out )
created_custtag_label = "%s/%s/001" % ( self.CREATED_BR, CUSTTAG_NUM_ONEXCOMMIT["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#NAME0
value = "Issue12345"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value , msg = "someother name" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NAME respecting SECOND regexp - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
#NAME1
value = "Issue77777"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value , msg = "someother name second" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME second on same - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
#
# NOW OVERLOAD ONE_X_COMMIT BUT WITH WRONG BOOL VALUE = Falseeee
#
basekey_num = "swgit.%s." % CUSTTAG_NUM["tagtype"]
basekey_name = "swgit.%s." % CUSTTAG_NAME["tagtype"]
#change one-x-commit
self.swgitUtil_Clone_.set_cfg( basekey_num + "one-x-commit", "Falseeee" )
self.swgitUtil_Clone_.set_cfg( basekey_name + "one-x-commit", "Falseeee" )
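# an unparsable boolean must not override the tag definition:
# one-x-commit effectively stays True, so a second tag on the same commit is still refused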
#NUM1
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM_ONEXCOMMIT["tagtype"], msg = "something num second" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NUM on same commit - \n%s\n" % out )
created_custtag_label = "%s/%s/001" % ( self.CREATED_BR, CUSTTAG_NUM_ONEXCOMMIT["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#NAME1
value = "Issue77777"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value , msg = "someother name second" )
self.assertEqual( errCode, 1, "MUST FAIL CUSTTAG NAME second on same - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 1, "MUST FAIL retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
#
# NOW OVERLOAD ONE_X_COMMIT = False
#
basekey_num = "swgit.%s." % CUSTTAG_NUM["tagtype"]
basekey_name = "swgit.%s." % CUSTTAG_NAME["tagtype"]
#change one-x-commit
self.swgitUtil_Clone_.set_cfg( basekey_num + "one-x-commit", "False" )
self.swgitUtil_Clone_.set_cfg( basekey_name + "one-x-commit", "False" )
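# a well-formed False overrides the definition: a second tag on the same commit is now accepted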
#NUM1
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NUM_ONEXCOMMIT["tagtype"], msg = "something num second" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NUM on same commit after overriding one-x-commit = False - \n%s\n" % out )
created_custtag_label = "%s/%s/001" % ( self.CREATED_BR, CUSTTAG_NUM_ONEXCOMMIT["tagtype"] )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label, tag0_sha) )
#NAME1
value = "Issue77777"
out, errCode = self.swgitUtil_Clone_.tag_create( CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value , msg = "someother name second" )
self.assertEqual( errCode, 0, "FAILED CUSTTAG NAME second on same commit after overriding one-x-commit = False - \n%s\n" % out )
created_custtag_label_1 = "%s/%s/%s" % ( self.CREATED_BR, CUSTTAG_NAME_ONEXCOMMIT["tagtype"], value )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.assertEqual( errCode, 0, "FAILED retrieving tag %s - \n%s\n" % (created_custtag_label_1, tag0_sha) )
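# For reference, a minimal sketch (hypothetical tag type MYTAG) of the
# git-config keys these tests override for a custom tag:
#   swgit.MYTAG.regexp        value regexp, e.g. "^[a-z]{3}$"
#   swgit.MYTAG.one-x-commit  "True" / "False"
#   swgit.MYTAG.tag-in-past   "True" / "False"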
def test_Tag_08_00_Delete( self ):
self.clone_createBr()
self.modify_and_commit()
commit0_sha, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( errCode, 0, "gitUtil_Clone_.get_currsha FAILED - \n%s\n" % commit0_sha )
#dev only local
out, errCode = self.swgitUtil_Clone_.tag_create( "dev", msg = "local dev" )
self.assertEqual( errCode, 0, "FAILED tag DEV - \n%s\n" % out )
#simulate a tag previously pushed
origin_dev = "origin/" + ORIG_REPO_DEVEL_BRANCH
out, errCode = self.gitUtil_Clone_.tag_put_on_commit( self.CREATED_DEV_1, origin_dev )
self.assertEqual( errCode, 0, "FAILED tag DEV in past, on reference %s - \n%s\n" % (origin_dev, out) )
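# CREATED_DEV_1 now looks already pushed: deleting it must be refused,
# while the purely local CREATED_DEV_0 can still be deleted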
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_DEV_0 )
self.assertEqual( errCode, 0, "FAILED tag delete of %s - \n%s\n" % (self.CREATED_DEV_0, out) )
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_DEV_1 )
self.assertEqual( errCode, 1, "MUST FAIL tag delete of already pushed tag (%s) - \n%s\n" % (self.CREATED_DEV_1, out) )
out, errCode = self.swgitUtil_Clone_.tag_delete( TEST_REPO_TAG_LIV )
self.assertEqual( errCode, 1, "MUST FAIL tag delete of LIV label on NON integrator repo (%s) - \n%s\n" % (TEST_REPO_TAG_LIV, out) )
def test_Tag_08_01_DeleteRemote_mine( self ):
out, errCode = swgit__utils.clone_scripts_repo( self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_scripts_repo" )
#create file labels and push
out, errCode = self.swgitUtil_Clone_.branch_create( self.BRANCH_NAME )
self.util_check_SUCC_scenario( out, errCode, "", "create branch" )
out, errCode = self.swgitUtil_Clone_.modify_file( ORIG_REPO_aFILE, "ccc" )
self.util_check_SUCC_scenario( out, errCode, "", "modify file" )
out, errCode = self.swgitUtil_Clone_.commit_minusA_dev_fix( self.DDTS )
self.util_check_SUCC_scenario( out, errCode, "", "commit" )
out, errCode = self.sw_origrepo_h.branch_switch_to_br( ORIG_REPO_aBRANCH )
self.util_check_SUCC_scenario( out, errCode, "", "switch origin repo branch" )
#new must be denied
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_NEWBR )
self.util_check_DENY_scenario( out, errCode, "tags can be deleted only by deleting associated branch", "delete tag" )
#push
if modetest_morerepos():
out, errCode = self.swgitUtil_Clone_.push_with_merge( ORIG_REPO_AREMOTE_NAME )
self.util_check_SUCC_scenario( out, errCode, "", "push" )
remote_h = self.sw_aremoterepo_h
else:
out, errCode = self.swgitUtil_Clone_.push_with_merge()
self.util_check_SUCC_scenario( out, errCode, "", "push" )
remote_h = self.sw_origrepo_h
#move afterwards
out, errCode = self.swgitUtil_Clone_.modify_file( ORIG_REPO_aFILE, "ddd" )
self.util_check_SUCC_scenario( out, errCode, "", "modify file" )
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_SUCC_scenario( out, errCode, "", "commit" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# check tag existence remote
self.assertEqual( remote_h.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
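# deletion rules exercised below: plain delete must refuse tags already
# pushed on origin, NEW tags can never be deleted directly, and only the
# forced variant also removes the tag from the remote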
# delete not forced no-op
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_DEV )
self.util_check_DENY_scenario( out, errCode, "Cannot delete a tag already pushed on origin", "delete tag" )
# delete not forced no-op
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_FIX )
self.util_check_DENY_scenario( out, errCode, "Cannot delete a tag already pushed on origin", "delete tag" )
# delete not forced no-op
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_NEWBR )
self.util_check_DENY_scenario( out, errCode, "tags can be deleted only by deleting associated branch", "delete tag" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# check tag existence remote
self.assertEqual( remote_h.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
#
# forced delete DEV
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_DEV )
self.util_check_SUCC_scenario( out, errCode, "Deleting also remote tag", "delete tag -D" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not exist %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# check tag existence remote
self.assertEqual( remote_h.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not find %s" % self.CREATED_BR_DEV )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
#
# re-forced delete DEV
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_DEV )
self.util_check_DENY_scenario( out, errCode,
"Please specify a valid tag to be deleted",
"re-delete tag -D" )
#
# forced delete FIX
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_FIX )
self.util_check_SUCC_scenario( out, errCode, "Deleting also remote tag", "delete tag -D" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not exist %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 1, "Must not exist %s" % self.CREATED_BR_FIX )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
#
# forced delete NEW
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_NEWBR )
self.util_check_DENY_scenario( out, errCode, "tags can be deleted only by deleting associated branch", "delete NEW" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not exist %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 1, "Must not exist %s" % self.CREATED_BR_FIX )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
def test_Tag_08_02_DeleteRemote_mine_fromDetached( self ):
out, errCode = swgit__utils.clone_scripts_repo( self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_scripts_repo" )
#create file labels and push
out, errCode = self.swgitUtil_Clone_.branch_create( self.BRANCH_NAME )
self.util_check_SUCC_scenario( out, errCode, "", "create branch" )
out, errCode = self.swgitUtil_Clone_.modify_file( ORIG_REPO_aFILE, "ccc" )
self.util_check_SUCC_scenario( out, errCode, "", "modify file" )
out, errCode = self.swgitUtil_Clone_.commit_minusA_dev_fix( self.DDTS )
self.util_check_SUCC_scenario( out, errCode, "", "commmit" )
out, errCode = self.sw_origrepo_h.branch_switch_to_br( ORIG_REPO_aBRANCH )
self.util_check_SUCC_scenario( out, errCode, "", "switch origin repo branch" )
#new must be denied
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_NEWBR )
self.util_check_DENY_scenario( out, errCode, "tags can be deleted only by deleting associated branch", "delete tag" )
#push
if modetest_morerepos():
out, errCode = self.swgitUtil_Clone_.push_with_merge( ORIG_REPO_AREMOTE_NAME )
self.util_check_SUCC_scenario( out, errCode, "", "push" )
remote_h = self.sw_aremoterepo_h
else:
out, errCode = self.swgitUtil_Clone_.push_with_merge()
self.util_check_SUCC_scenario( out, errCode, "", "push" )
remote_h = self.sw_origrepo_h
#move detached
shaStable, err = self.swgitUtil_Clone_.ref2sha( "origin/" + ORIG_REPO_STABLE_BRANCH )
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( shaStable )
self.util_check_SUCC_scenario( out, errCode, "", "go detached" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# check tag existence remote
self.assertEqual( remote_h.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# delete not forced no-op
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_DEV )
self.util_check_DENY_scenario( out, errCode, "Cannot delete a tag already pushed on origin", "delete tag" )
# delete not forced no-op
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_FIX )
self.util_check_DENY_scenario( out, errCode, "Cannot delete a tag already pushed on origin", "delete tag" )
# delete not forced no-op
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_NEWBR )
self.util_check_DENY_scenario( out, errCode, "tags can be deleted only by deleting associated branch", "delete tag" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# check tag existence remote
self.assertEqual( remote_h.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_DEV )[1], 0, "Not found %s" % self.CREATED_BR_DEV )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
#
# forced delete DEV
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_DEV )
self.util_check_SUCC_scenario( out, errCode, "Deleting also remote tag", "delete tag -D" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not exist %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
# check tag existence remote
self.assertEqual( remote_h.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not find %s" % self.CREATED_BR_DEV )
self.assertEqual( remote_h.ref2sha( self.CREATED_BR_FIX )[1], 0, "Not found %s" % self.CREATED_BR_FIX )
#
# re-forced delete DEV
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_DEV )
self.util_check_DENY_scenario( out, errCode,
"Please specify a valid tag to be deleted",
"re-delete tag -D" )
#
# forced delete FIX
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_FIX )
self.util_check_SUCC_scenario( out, errCode, "Deleting also remote tag", "delete tag -D" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not exist %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 1, "Must not exist %s" % self.CREATED_BR_FIX )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
#
# forced delete NEW
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_NEWBR )
self.util_check_DENY_scenario( out, errCode, "tags can be deleted only by deleting associated branch", "delete NEW" )
# check tag existence local
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )[1], 1, "Must not exist %s" % self.CREATED_BR_DEV )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_FIX )[1], 1, "Must not exist %s" % self.CREATED_BR_FIX )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR_NEWBR )[1], 0, "Not found %s" % self.CREATED_BR_NEWBR )
self.assertEqual( self.swgitUtil_Clone_.ref2sha( self.CREATED_BR )[1], 0, "Not found %s" % self.CREATED_BR )
def test_Tag_08_03_DeleteRemote_NotPushed( self ):
out, errCode = swgit__utils.clone_scripts_repo( self.TAG_CLONE_DIR )
self.assertEqual( errCode, 0, "SWGIT clone FAILED - swgit__utils.clone_scripts_repo" )
#create file labels and push
out, errCode = self.swgitUtil_Clone_.branch_create( self.BRANCH_NAME )
self.util_check_SUCC_scenario( out, errCode, "", "create branch" )
out, errCode = self.swgitUtil_Clone_.modify_file( ORIG_REPO_aFILE, "ccc" )
self.util_check_SUCC_scenario( out, errCode, "", "modify file" )
out, errCode = self.swgitUtil_Clone_.commit_minusA_dev_fix( self.DDTS )
self.util_check_SUCC_scenario( out, errCode, "", "commit" )
out, errCode = self.sw_origrepo_h.branch_switch_to_br( ORIG_REPO_aBRANCH )
self.util_check_SUCC_scenario( out, errCode, "", "switch origin repo branch" )
out, errCode = self.gitUtil_Clone_.ref2sha( self.CREATED_BR_DEV )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving local %s" % self.CREATED_BR_DEV )
out, errCode = self.sw_origrepo_h.ref2sha( self.CREATED_BR_DEV )
self.util_check_DENY_scenario( out, errCode, "", "retrieving remote %s" % self.CREATED_BR_DEV )
#
# forced delete DEV
if modetest_morerepos():
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_DEV )
self.util_check_DENY_scenario( out, errCode, "No remote branch found for branch name", "delete tag -e" )
out, errCode = self.swgitUtil_Clone_.tag_delete( self.CREATED_BR_DEV )
self.util_check_SUCC_scenario( out, errCode, "", "delete tag -d" )
else:
out, errCode = self.swgitUtil_Clone_.tag_delete_e( self.CREATED_BR_DEV )
self.util_check_SUCC_scenario( out, errCode, "Deleting also remote tag", "delete tag -e" )
# HERE HEAD IS IN THE PAST FOR EVERYONE, BUT ON THE CLONE POINT
#
# repo
# |
# A
# |\
# | B
# | |
# | C <-- orig_modbr
# |/ vvvv
# D <-- o/dev and dev and HEAD
# \ ^^^^
# E <-- prova_tag
#
def test_Tag_09_00_TagInPast_OnClonePoint( self ):
self.clone_createBr( somecommmitondev = True )
self.modify_and_commit()
sha_clonetime, errCode = self.gitUtil_Clone_.get_currsha( "origin/%s" % ORIG_REPO_DEVEL_BRANCH )
#goto int
out, errCode = self.swgitUtil_Clone_.branch_switch_to_int()
self.assertEqual( errCode, 0, "FAILED switch to int - out:\n%s" % out )
sha_intbr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( sha_clonetime, sha_intbr, "FAILED switch to int, now at another place %s - out:\n%s" % (sha_intbr,out) )
#define custom tag
TAG_NUM_ECHO_NOPAST = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NUM_ECHO_NOPAST["tagtype"] = "TAGINPAST"
TAG_NUM_ECHO_NOPAST["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NUM_ECHO_NOPAST["tag_in_past"] = "falSe"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NUM_ECHO_NOPAST )
#create tag
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_NOPAST["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"Commit you are tagging is already pushed on origin",
"tagging in past a nopast label" )
#overload option to allow tagging
basekey_num = "swgit.%s." % TAG_NUM_ECHO_NOPAST["tagtype"]
self.swgitUtil_Clone_.set_cfg( basekey_num + "tag-in-past", "TRue" )
#create tag
# NOTE: on INT, the tag base MUST BE the INT develop branch
BASEBR = ORIG_REPO_DEVEL_BRANCH
created_custtag_label = "%s/%s/000" % ( BASEBR, TAG_NUM_ECHO_NOPAST["tagtype"] )
created_custtag_PAST_label = "PAST/%s" % created_custtag_label
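# tagging in past creates the label itself plus a PAST/<label> companion;
# the PAST companion must disappear once the tag is pushed (checked at the end)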
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_NOPAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag PAST/",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
#commit on int br and push
out, errCode = self.gitUtil_Clone_.commit_minusA()
self.util_check_SUCC_scenario( out, errCode, "", "commit on INT" )
out, errCode = self.swgitUtil_Repo_.branch_switch_to_br( self.ORIG_MOD_BRANCH )
self.util_check_SUCC_scenario( out, errCode, "", "switch origin repo branch" )
out, errCode = self.swgitUtil_Clone_.push()
self.util_check_SUCC_scenario( out, errCode, "", "push" )
#check which tags exist locally and on origin
out, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
out, errCode = self.gitUtil_Repo_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s on origin" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Repo_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s on origin" % created_custtag_PAST_label )
# HERE HEAD IS IN THE PAST FOR EVERYONE, BUT NOT DETACHED
#
# repo
# |
# A
# |\
# | B
# | | vvvv
# | C <-- orig_modbr and HEAD
# |/ ^^^^
# D <-- o/dev and dev
# \
# E <-- prova_tag
#
def test_Tag_09_01_TagInPast_OnOriginBranch( self ):
self.clone_createBr( somecommmitondev = True )
self.modify_and_commit()
ORIGBR_INPAST = "origin/%s" % self.ORIG_MOD_FULL_BRANCH
sha_origbr, errCode = self.gitUtil_Clone_.get_currsha( ORIGBR_INPAST )
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( self.ORIG_MOD_BRANCH )
self.assertEqual( errCode, 0, "FAILED switch to br %s - out:\n%s" % ( self.ORIG_MOD_BRANCH, out) )
sha_curr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( sha_curr, sha_origbr, "FAILED switch to br %s, now at another place - out:\n%s" % (self.ORIG_MOD_BRANCH,out) )
#define custom tag
TAG_NUM_ECHO_NOPAST = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NUM_ECHO_NOPAST["tagtype"] = "TAGINPAST"
TAG_NUM_ECHO_NOPAST["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NUM_ECHO_NOPAST["tag_in_past"] = "falSe"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NUM_ECHO_NOPAST )
#create tag
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_NOPAST["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"Commit you are tagging is already pushed on origin",
"tagging in past a nopast label" )
#overload option to allow tagging
basekey_num = "swgit.%s." % TAG_NUM_ECHO_NOPAST["tagtype"]
self.swgitUtil_Clone_.set_cfg( basekey_num + "tag-in-past", "TRue" )
#create tag
BASEBR = self.ORIG_MOD_FULL_BRANCH
created_custtag_label = "%s/%s/000" % ( BASEBR, TAG_NUM_ECHO_NOPAST["tagtype"] )
created_custtag_PAST_label = "PAST/%s" % created_custtag_label
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_NOPAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag PAST/",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
# HERE HEAD IS IN THE PAST FOR EVERYONE (BEFORE ORIG/DEVELOP)
#
# repo
# |
# A
# |\ vvvv
# | B <-- HEAD
# | | ^^^^
# | C <-- orig_modbr
# |/
# D <-- o/dev and dev
# \
# E <-- prova_tag
#
def test_Tag_09_02_TagInPast_DetachedHead_PastForAll( self ):
self.clone_createBr( somecommmitondev = True )
self.modify_and_commit()
DETACH_INPAST = "origin/%s~1" % self.ORIG_MOD_FULL_BRANCH
sha_detachpoint, errCode = self.gitUtil_Clone_.get_currsha( DETACH_INPAST )
#goto detached point in past
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( DETACH_INPAST )
self.assertEqual( errCode, 0, "FAILED switch to br %s - out:\n%s" % ( DETACH_INPAST, out) )
sha_curr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( sha_curr, sha_detachpoint, "FAILED switch to point, now at another place %s - out:\n%s" % (sha_detachpoint,out) )
#define custom tag
TAG_NUM_ECHO_PAST = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NUM_ECHO_PAST["tagtype"] = "TAGINPAST"
TAG_NUM_ECHO_PAST["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NUM_ECHO_PAST["tag_in_past"] = "tRuE"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NUM_ECHO_PAST )
#overload option to NOT allow tagging
basekey_num = "swgit.%s." % TAG_NUM_ECHO_PAST["tagtype"]
tag_cfg = basekey_num + "tag-in-past"
self.swgitUtil_Clone_.set_cfg( tag_cfg, "FAlse" )
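# the git-config override takes precedence over the tag definition:
# tag_in_past is True in the definition, but the False override must win until unset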
#create tag
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"is not configured to be put in past.",
"tagging in past a nopast label" )
#unset deny
out, errCode = self.swgitUtil_Clone_.set_cfg( tag_cfg, SWCFG_TEST_UNSET )
self.assertEqual( errCode, 0, "FAILED manually unsetting cfg - out:\n%s" % ( out ) )
#create tag
BASEBR = self.ORIG_MOD_FULL_BRANCH
created_custtag_label = "%s/%s/000" % ( BASEBR, TAG_NUM_ECHO_PAST["tagtype"] )
created_custtag_PAST_label = "PAST/%s" % created_custtag_label
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag PAST/",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
  # HERE IS IN PAST ONLY FOR ME (AFTER ORIG/DEVELOP)
  # NOTHING HAS BEEN PUSHED TO ORIGIN YET
#
# repo
# |
# A
# |\
# | B
# | |
# | C <-- orig_modbr
# |/
# D <-- o/dev and dev
# \ vvvv
# E <-- HEAD << N.B. HERE is in past, but only for me (not already pushed) >>
# | ^^^^
# F <-- prova_tag
#
def test_Tag_09_03_TagInPast_DetachedHead_PastOnlyForMe( self ):
self.clone_createBr( somecommmitondev = True )
self.modify_and_commit()
self.modify_and_commit()
DETACH_INPAST = "%s~1" % self.CREATED_BR
sha_detachpoint, errCode = self.gitUtil_Clone_.get_currsha( DETACH_INPAST )
#goto detached point in past
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( DETACH_INPAST )
self.assertEqual( errCode, 0, "FAILED switch to br %s - out:\n%s" % ( DETACH_INPAST, out) )
sha_curr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( sha_curr, sha_detachpoint, "FAILED switch to point, another place %s- out:\n%s" % (sha_detachpoint,out) )
#define custom tag
TAG_NUM_ECHO_PAST = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NUM_ECHO_PAST["tagtype"] = "TAGINPAST"
TAG_NUM_ECHO_PAST["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NUM_ECHO_PAST["tag_in_past"] = "tRuE"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NUM_ECHO_PAST )
#overload option to NOT allow tagging
basekey_num = "swgit.%s." % TAG_NUM_ECHO_PAST["tagtype"]
tag_cfg = basekey_num + "tag-in-past"
self.swgitUtil_Clone_.set_cfg( tag_cfg, "FAlse" )
#create tag
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"is not configured to be put in past.",
"tagging in past a nopast label" )
#unset deny
out, errCode = self.swgitUtil_Clone_.set_cfg( tag_cfg, SWCFG_TEST_UNSET )
self.assertEqual( errCode, 0, "FAILED manually unsettin cfg - out:\n%s" % ( out ) )
#create tag
BASEBR = self.CREATED_BR
created_custtag_label = "%s/%s/000" % ( BASEBR, TAG_NUM_ECHO_PAST["tagtype"] )
created_custtag_PAST_label = "PAST/%s" % created_custtag_label
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag PAST/",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
def test_Tag_09_04_TagInPast_AlreadyTaggedDownstream( self ):
self.clone_createBr( somecommmitondev = True )
#define custom tag
TAG_NUM_ECHO_PAST = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NUM_ECHO_PAST["tagtype"] = "TAGINPAST"
TAG_NUM_ECHO_PAST["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NUM_ECHO_PAST["tag_in_past"] = "tRuE"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NUM_ECHO_PAST )
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_SUCC_scenario( out, errCode, "", "FAILED commit" )
self.modify_and_commit( alsotags = False )
self.modify_and_commit( alsotags = False )
#create tag (this is not in past)
BASEBR = self.CREATED_BR
created_custtag_label_0 = "%s/%s/000" % ( BASEBR, TAG_NUM_ECHO_PAST["tagtype"] )
created_custtag_PAST_label_0 = "PAST/%s" % created_custtag_label_0
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_0 )
self.util_check_DENY_scenario( tag0_sha, errCode, "", "retrieving %s" % created_custtag_label_0 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_0 )
self.util_check_DENY_scenario( tagPast_sha, errCode, "", "retrieving %s" % created_custtag_PAST_label_0 )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode, "",
"tagging in past a past label" )
self.assertTrue( "Tagging in past also creates tag" not in out,
"FAILED tagging not in past with a past-label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_0 )
self.util_check_SUCC_scenario( tag0_sha, errCode, "", "retrieving %s" % created_custtag_label_0 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_0 )
self.util_check_DENY_scenario( tagPast_sha, errCode, "", "retrieving %s" % created_custtag_PAST_label_0 )
DETACH_INPAST = "%s~1" % self.CREATED_BR
sha_detachpoint, errCode = self.gitUtil_Clone_.get_currsha( DETACH_INPAST )
#goto detached point in past
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( DETACH_INPAST )
self.assertEqual( errCode, 0, "FAILED switch to br %s - out:\n%s" % ( DETACH_INPAST, out) )
sha_curr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( sha_curr, sha_detachpoint, "FAILED switch to point, another place %s- out:\n%s" % (sha_detachpoint,out) )
#create tag (this is in past)
BASEBR = self.CREATED_BR
created_custtag_label_1 = "%s/%s/001" % ( BASEBR, TAG_NUM_ECHO_PAST["tagtype"] )
created_custtag_PAST_label_1 = "PAST/%s" % created_custtag_label_1
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
#re-create tag (this is in past)
BASEBR = self.CREATED_BR
created_custtag_label_1 = "%s/%s/002" % ( BASEBR, TAG_NUM_ECHO_PAST["tagtype"] )
created_custtag_PAST_label_1 = "PAST/%s" % created_custtag_label_1
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
    #now force 1 per commit; the same re-create as before must now fail
basekey_num = "swgit.%s." % TAG_NUM_ECHO_PAST["tagtype"]
tag_cfg = basekey_num + "one-x-commit"
self.swgitUtil_Clone_.set_cfg( tag_cfg, "true" )
#re-re-create tag (this is in past), but max 1 per commit
BASEBR = self.CREATED_BR
created_custtag_label_1 = "%s/%s/003" % ( BASEBR, TAG_NUM_ECHO_PAST["tagtype"] )
created_custtag_PAST_label_1 = "PAST/%s" % created_custtag_label_1
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_PAST["tagtype"] )
self.util_check_DENY_scenario( out, errCode,
"Only 1", #label per-commit allowed.
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
  # This test tries what happens when a branch comes before INT/develop
  # (and in the past this can happen):
  # git describe will choose orig_mod before orig/develop and create an unwanted name.
  # Very difficult to change this behaviour...
  # or to understand what the user wants when tagging there...
  # keep it this way for the moment
#
# repo
# | vvvv
# A <-- HEAD
# |\ ^^^^
# | B
# | |
# | C <-- orig_modbr
# |/
# D <-- o/dev and dev
# \
# E
# |
# F <-- prova_tag
#
def test_Tag_09_05_TagInPast_onOriginDev_abranchInMiddle( self ):
self.clone_createBr( somecommmitondev = True )
self.modify_and_commit()
sha_clonetime, errCode = self.gitUtil_Clone_.get_currsha()
#goto int
DETACHED_POINT = "%s/NEW/BRANCH~1" % self.ORIG_MOD_FULL_BRANCH
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( DETACHED_POINT )
self.assertEqual( errCode, 0, "FAILED switch to br - out:\n%s" % out )
sha_intbr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertNotEqual( sha_clonetime, sha_intbr, "FAILED switch to int , another place %s- out:\n%s" % (sha_intbr,out) )
#define custom tag
TAG_NUM_ECHO_NOPAST = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NUM_ECHO_NOPAST["tagtype"] = "TAGINPAST"
TAG_NUM_ECHO_NOPAST["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NUM_ECHO_NOPAST["tag_in_past"] = "True"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NUM_ECHO_NOPAST )
#create tag
# NOTE: this will choose nearest...
# not nice...
#BASEBR = ORIG_REPO_DEVEL_BRANCH
BASEBR = self.ORIG_MOD_FULL_BRANCH
created_custtag_label = "%s/%s/000" % ( BASEBR, TAG_NUM_ECHO_NOPAST["tagtype"] )
created_custtag_PAST_label = "PAST/%s" % created_custtag_label
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NUM_ECHO_NOPAST["tagtype"] )
self.util_check_SUCC_scenario( out, errCode,
"Tagging in past also creates tag PAST/",
"tagging in past a past label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label )
    self.util_check_SUCC_scenario( tagPast_sha, errCode, "", "retrieving %s" % created_custtag_PAST_label )
def test_Tag_09_06_TagInPast_NotPushOnOrigin( self ):
self.clone_createBr( somecommmitondev = True )
#define custom tag
TAG_NOPUSH = copy.deepcopy( CUSTTAG_NUM )
script_comment = "comment: ABCD"
TAG_NOPUSH["tagtype"] = "TAGINPAST"
TAG_NOPUSH["hook_pretag_script"] = "echo \"%s\"" % script_comment
TAG_NOPUSH["tag_in_past"] = "tRuE"
TAG_NOPUSH["push_on_origin"] = "false"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NOPUSH )
out, errCode = self.swgitUtil_Clone_.commit_minusA()
self.util_check_SUCC_scenario( out, errCode, "", "FAILED commit" )
self.modify_and_commit( alsotags = False )
DETACH_INPAST = "HEAD~1"
sha_detachpoint, errCode = self.gitUtil_Clone_.get_currsha( DETACH_INPAST )
#goto detached point in past
out, errCode = self.swgitUtil_Clone_.branch_switch_to_br( DETACH_INPAST )
self.assertEqual( errCode, 0, "FAILED switch to br %s - out:\n%s" % ( DETACH_INPAST, out) )
sha_curr, errCode = self.gitUtil_Clone_.get_currsha()
self.assertEqual( sha_curr, sha_detachpoint, "FAILED switch to point, another place %s- out:\n%s" % (sha_detachpoint,out) )
#create tag
BASEBR = self.CREATED_BR
created_custtag_label_1 = "%s/%s/000" % ( BASEBR, TAG_NOPUSH["tagtype"] )
created_custtag_PAST_label_1 = "PAST/%s" % created_custtag_label_1
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NOPUSH["tagtype"] )
self.util_check_SUCC_scenario( out, errCode, "",
"tagging in past a past label" )
self.assertTrue( "Tagging in past also creates tag" not in out, "FAILED also created PAST tag for a not push on origin label" )
tag0_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_label_1 )
self.util_check_SUCC_scenario( out, errCode, "", "retrieving %s" % created_custtag_label_1 )
tagPast_sha, errCode = self.gitUtil_Clone_.ref2sha( created_custtag_PAST_label_1 )
self.util_check_DENY_scenario( out, errCode, "", "retrieving %s" % created_custtag_PAST_label_1 )
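
  # Behaviour exercised by the Tag_09_0x tests above, as inferred from the
  # tests themselves (not from swgit documentation):
  #   * swgit.<TAGTYPE>.tag-in-past must be true (case-insensitive) for a
  #     tag to be created on a commit that is not the current branch head;
  #   * tagging "in past" also creates a PAST/<label> companion reference,
  #     except for tag types with push_on_origin=false (09_06); no PAST/
  #     reference is created when tagging the branch head itself (09_04);
  #   * swgit.<TAGTYPE>.one-x-commit limits a tag type to one label per
  #     commit (09_04).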
def test_Tag_10_00_ReuseComment( self ):
self.clone_createBr()
out, errCode = echo_on_file( self.MODIFY_FILE )
self.util_check_SUCC_scenario( out, errCode, "", "FAILED echo" )
TEST_MSG = "TEST COMMIT MESSAGE"
out, errCode = self.swgitUtil_Clone_.commit_minusA( msg = TEST_MSG )
self.util_check_SUCC_scenario( out, errCode, "", "FAILED commit" )
#define custom tag no echo
TAG_NOECHO = copy.deepcopy( CUSTTAG_NUM )
TAG_NOECHO["tagtype"] = "TAG_NOECHO"
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_NOECHO )
#define custom tag echo
TAG_ECHO = copy.deepcopy( CUSTTAG_NUM )
script_comment = "TAG comment: ABCD"
TAG_ECHO["tagtype"] = "TAG_ECHO"
TAG_ECHO["hook_pretag_script"] = "echo \"%s\"" % script_comment
self.swgitUtil_Clone_.tag_define_custom_tag( TAG_ECHO )
    #interface test: invalid -m / -M combinations must be rejected
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NOECHO["tagtype"], msg = "", reuse = False )
self.util_check_DENY_scenario( out, errCode,
"Please specify option -m or -M or",
"MUST FAIL, without -M, -m, noecho" )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NOECHO["tagtype"], msg = "a msg", reuse = True )
self.util_check_DENY_scenario( out, errCode,
"Please specify only one among",
"MUST FAIL, with -M and -m" )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_ECHO["tagtype"], msg = "", reuse = False )
self.util_check_SUCC_scenario( out, errCode, "",
"FAILED, without -M, -m, but yes echo" )
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_ECHO["tagtype"], msg = "a msg", reuse = True )
self.util_check_DENY_scenario( out, errCode,
"Please specify only one among",
"MUST FAIL, with -M and -m" )
    #payload test
    #
    #echo => -M goes into the tag body
    #
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_ECHO["tagtype"], reuse = True )
self.util_check_SUCC_scenario( out, errCode, "",
"FAILED, with -M, echo" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, TAG_ECHO["tagtype"] )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_body( created_custtag_label )
self.util_check_SUCC_scenario( tag_comment, errCode,
TEST_MSG,
"FAILED, get_comment" )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label )
self.util_check_SUCC_scenario( tag_comment, errCode,
script_comment,
"FAILED, get_comment" )
    #
    #noecho => -M goes into the tag subject
    #
out, errCode = self.swgitUtil_Clone_.tag_create( TAG_NOECHO["tagtype"], reuse = True )
self.util_check_SUCC_scenario( out, errCode, "",
"FAILED, with -M, noecho" )
created_custtag_label = "%s/%s/000" % ( self.CREATED_BR, TAG_NOECHO["tagtype"] )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_subject( created_custtag_label )
self.util_check_SUCC_scenario( tag_comment, errCode,
TEST_MSG,
"FAILED, get_comment" )
tag_comment, errCode = self.gitUtil_Clone_.tag_get_body( created_custtag_label )
self.assertTrue( script_comment not in tag_comment, "FAILED, get_comment" )
if __name__ == '__main__':
manage_debug_opt( sys.argv )
unittest.main()
| andreav/swgit | test/test_tag.py | Python | gpl-3.0 | 128,108 |
class Beer(object):
def sing(self, first, last=0):
verses = ''
for number in reversed(range(last, first + 1)):
verses += self.verse(number) + '\n'
return verses
def verse(self, number):
return ''.join([
"%s of beer on the wall, " % self._bottles(number).capitalize(),
"%s of beer.\n" % self._bottles(number),
self._action(number),
self._next_bottle(number),
])
def _action(self, current_verse):
if current_verse == 0:
return "Go to the store and buy some more, "
else:
return "Take %s down and pass it around, " % (
"one" if current_verse > 1 else "it"
)
def _next_bottle(self, current_verse):
return "%s of beer on the wall.\n" % self._bottles(self._next_verse(current_verse))
def _bottles(self, number):
if number == 0:
return 'no more bottles'
if number == 1:
return '1 bottle'
else:
return '%d bottles' % number
def _next_verse(self, current_verse):
return current_verse - 1 if current_verse > 0 else 99
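
# Illustrative usage (not part of the original exercise solution): print the
# last two verses of the song.
if __name__ == '__main__':
    print(Beer().sing(2, 1))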
| mscoutermarsh/exercism_coveralls | assignments/python/beer-song/example.py | Python | agpl-3.0 | 1,182 |
from pathlib import Path
import bcrypt
from falcon import HTTP_401, HTTP_409, HTTP_201
import hug
import jwt
from . import get_secret, token_verify
# This is used in protected api paths. Ex: hug.get('/protected', requires=auth.token_key_authentication)
token_key_authentication = hug.authentication.token(token_verify)
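
# Minimal sketch (not in the original module) of a route protected by the
# token authenticator above; the '/protected' path and handler name are
# illustrative. hug passes the value returned by token_verify to the
# handler through the `user` directive.
@hug.get('/protected', requires=token_key_authentication)
def protected_example(user: hug.directives.user):
    return {'user': user}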
def get_user_folder(username: str) -> Path:
user_folder = Path('.') / 'data' / 'users' / username
if not user_folder.exists():
user_folder.mkdir(parents=True)
return user_folder
@hug.post('/register')
def register_user(username: str, password: str, response=None):
user_folder = get_user_folder(username)
user_pw_file = user_folder / 'password.txt'
if user_pw_file.exists():
response.status = HTTP_409
return {'error': 'username already in use'}
# 12 is default salt rounds
hashed_password = bcrypt.hashpw(str.encode(password), bcrypt.gensalt())
with user_pw_file.open(mode='wb') as f:
f.write(hashed_password)
response.status = HTTP_201
return {'status': 'ok'}
@hug.post('/signin')
def signin_user(username: str, password: str, response=None):
secret = get_secret()
user_folder = get_user_folder(username)
user_pw_file = user_folder / 'password.txt'
if not user_pw_file.exists():
response.status = HTTP_401
return {'error': 'Invalid credentials'}
with user_pw_file.open(mode='rb') as f:
hashed_password = f.readline()
if not bcrypt.checkpw(str.encode(password), hashed_password):
response.status = HTTP_401
return {'error': 'Invalid credentials'}
return {
"token": jwt.encode(
{'user': username},
secret,
algorithm='HS256'
)
}
| x10an14/overtime-calculator | overtime_calculator/auth.py | Python | mit | 1,759 |
# Copyright 2016 - SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '1.0.0'
driver = 'k8s_opensuse_v1'
container_version = '1.12.3'
| ArchiFleKs/magnum | contrib/drivers/k8s_opensuse_v1/version.py | Python | apache-2.0 | 683 |
# -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <[email protected]>
@contact: [email protected]
@copyright: Fabio Erculiani
@license: GPL-2
B{Entropy Command Line Client}.
"""
import sys
import argparse
from entropy.i18n import _
from entropy.const import const_convert_to_unicode
from entropy.output import purple, teal, darkred, brown, red, \
darkgreen, blue
from solo.commands.descriptor import SoloCommandDescriptor
from solo.commands.command import SoloCommand
from solo.utils import enlightenatom
class SoloMaskUnmask(SoloCommand):
def __init__(self, args, action):
SoloCommand.__init__(self, args)
self._ask = False
self._pretend = False
self._action = action
def _get_parser(self):
"""
Overridden from SoloCommand.
"""
descriptor = SoloCommandDescriptor.obtain_descriptor(
self.NAME)
parser = argparse.ArgumentParser(
description=descriptor.get_description(),
formatter_class=argparse.RawDescriptionHelpFormatter,
prog="%s %s" % (sys.argv[0], self.NAME))
parser.add_argument("packages", nargs='+',
metavar="<package>", help=_("package name"))
group = parser.add_mutually_exclusive_group()
group.add_argument("--ask", "-a", action="store_true",
default=self._ask,
help=_('ask before making any changes'))
group.add_argument("--pretend", "-p", action="store_true",
default=self._pretend,
help=_('only show what would be done'))
return parser
def bashcomp(self, last_arg):
"""
Overridden from SoloCommand.
"""
args = ["--ask", "-a", "--pretend", "-p"]
args.sort()
return self._bashcomp(sys.stdout, last_arg, args)
def man(self):
"""
Overridden from SoloCommand.
"""
return self._man()
def parse(self):
"""
Parse command.
"""
parser = self._get_parser()
try:
nsargs = parser.parse_args(self._args)
except IOError as err:
sys.stderr.write("%s\n" % (err,))
return parser.print_help, []
self._packages = nsargs.packages
self._pretend = nsargs.pretend
self._ask = nsargs.ask
return self._call_shared, [self._run]
def _run(self, entropy_client):
"""
Mask/Unmask code logic.
"""
found_pkg_atoms = []
for package in self._packages:
package_id, repoid = entropy_client.atom_match(
package, mask_filter = False)
if package_id == -1:
mytxt = "!!! %s: %s %s." % (
purple(_("Warning")),
teal(const_convert_to_unicode(package)),
purple(_("is not available")),
)
entropy_client.output("!!!", level="warning", importance=1)
entropy_client.output(mytxt, level="warning", importance=1)
entropy_client.output("!!!", level="warning", importance=1)
if len(package) > 3:
self._show_did_you_mean(
entropy_client, package,
from_installed=False)
entropy_client.output("!!!", level="warning", importance=1)
continue
found_pkg_atoms.append(package)
if not found_pkg_atoms:
entropy_client.output(
"%s." % (
darkred(_("No packages found")),
),
level="error", importance=1)
return 1
if self._ask or self._pretend:
mytxt = "%s:" % (
blue(_("These are the packages that would be handled")),
)
entropy_client.output(
mytxt,
header=red(" @@ "))
match_data = {}
for package in found_pkg_atoms:
matches, rc = entropy_client.atom_match(
package, multi_match = True, multi_repo = True,
mask_filter = False)
match_data[package] = matches
flags = darkgreen(" [")
if self._action == "mask":
flags += brown("M")
else:
flags += red("U")
flags += darkgreen("] ")
entropy_client.output(
darkred(" ##") + flags + purple(package))
if rc == 0:
# also show found pkgs
for package_id, repository_id in matches:
repo = entropy_client.open_repository(repository_id)
atom = repo.retrieveAtom(package_id)
entropy_client.output(
" -> " + enlightenatom(atom))
if self._pretend:
return 0
if self._ask:
answer = entropy_client.ask_question(
_("Would you like to continue?"))
if answer == _("No"):
return 0
for package, matches in match_data.items():
for match in matches:
if self._action == "mask":
done = entropy_client.mask_package_generic(match, package)
else:
done = entropy_client.unmask_package_generic(match, package)
if not done:
mytxt = "!!! %s: %s %s." % (
purple(_("Warning")),
teal(const_convert_to_unicode(package)),
purple(_("action not executed")),
)
entropy_client.output("!!!", level="warning", importance=1)
entropy_client.output(mytxt, level="warning", importance=1)
entropy_client.output("!!!", level="warning", importance=1)
entropy_client.output("Have a nice day.")
return 0
class SoloMask(SoloMaskUnmask):
"""
Main Solo Mask command.
"""
NAME = "mask"
ALIASES = []
INTRODUCTION = """\
Mask packages so that installation and update will be inhibited.
"""
SEE_ALSO = "equo-unmask(1)"
def __init__(self, args):
SoloMaskUnmask.__init__(self, args, SoloMask.NAME)
class SoloUnmask(SoloMaskUnmask):
"""
Main Solo Mask command.
"""
NAME = "unmask"
ALIASES = []
INTRODUCTION = """\
Unmask packages so that installation and update will be allowed.
"""
SEE_ALSO = "equo-mask(1)"
def __init__(self, args):
SoloMaskUnmask.__init__(self, args, SoloUnmask.NAME)
SoloCommandDescriptor.register(
SoloCommandDescriptor(
SoloMask,
SoloMask.NAME,
_("mask one or more packages"))
)
SoloCommandDescriptor.register(
SoloCommandDescriptor(
SoloUnmask,
SoloUnmask.NAME,
_("unmask one or more packages"))
)
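
# Illustrative command lines accepted by the parser above (see also
# equo-mask(1) and equo-unmask(1)):
#   equo mask app-foo/bar --pretend
#   equo unmask app-foo/bar --ask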
| mudler/entropy | client/solo/commands/mask.py | Python | gpl-2.0 | 7,030 |
# Copyright (c) The University of Edinburgh 2014
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
The core packages for Dispel4Py.
'''
| akrause2014/registry | test/__init__.py | Python | apache-2.0 | 636 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Modules for the GUI.
"""
| RedBeardCode/QDjConChart | gui/__init__.py | Python | mit | 74 |
import json
import os
import re
import shutil
import subprocess
import sys
import tempfile
import textwrap
import time
import venv # type: ignore
import zipfile
from argparse import ArgumentParser
from dataclasses import dataclass
from pathlib import Path
from urllib.request import urlopen
from typing import Dict, Iterator, List, Optional, Tuple
HOMEBREW_PYTHON = (3, 8)
# This should match the pattern in .bumpversion.cfg
VERSION_PATTERN = re.compile(
r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
r'((?P<prerelease>[a-z]+)(?P<num>\d+))?'
)
class Version:
def __init__(self, raw: str) -> None:
self.raw = raw
match = VERSION_PATTERN.match(self.raw)
assert match is not None, f'Invalid version: {self.raw}'
groups = match.groupdict()
self.major: int = int(groups['major'])
self.minor: int = int(groups['minor'])
self.patch: int = int(groups['patch'])
self.prerelease: Optional[str] = None
self.num: Optional[int] = None
if groups['num'] is not None:
self.prerelease = groups['prerelease']
self.num = int(groups['num'])
def __str__(self):
return self.raw
def homebrew_class_name(self) -> str:
name = f'DbtAT{self.major}{self.minor}{self.patch}'
if self.prerelease is not None and self.num is not None:
name = f'{name}{self.prerelease.title()}{self.num}'
return name
def homebrew_filename(self):
version_str = f'{self.major}.{self.minor}.{self.patch}'
if self.prerelease is not None and self.num is not None:
version_str = f'{version_str}-{self.prerelease}{self.num}'
return f'dbt@{version_str}.rb'
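
    # Illustrative examples, following VERSION_PATTERN above:
    #   Version('0.17.0')    -> major=0, minor=17, patch=0, prerelease=None
    #   Version('0.17.0rc1') -> prerelease='rc', num=1, and
    #                           homebrew_filename() == '[email protected]'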
@dataclass
class Arguments:
version: Version
part: str
path: Path
homebrew_path: Path
homebrew_set_default: bool
set_version: bool
build_pypi: bool
upload_pypi: bool
test_upload: bool
build_homebrew: bool
build_docker: bool
upload_docker: bool
write_requirements: bool
write_dockerfile: bool
@classmethod
def parse(cls) -> 'Arguments':
parser = ArgumentParser(
prog="Bump dbt's version, build packages"
)
parser.add_argument(
'version',
type=Version,
help="The version to set",
)
parser.add_argument(
'part',
type=str,
help="The part of the version to update",
)
parser.add_argument(
'--path',
type=Path,
help='The path to the dbt repository',
default=Path.cwd(),
)
parser.add_argument(
'--homebrew-path',
type=Path,
help='The path to the dbt homebrew install',
default=(Path.cwd() / '../homebrew-dbt'),
)
parser.add_argument(
'--homebrew-set-default',
action='store_true',
help='If set, make this homebrew version the default',
)
parser.add_argument(
'--no-set-version',
dest='set_version',
action='store_false',
help='Skip bumping the version',
)
parser.add_argument(
'--no-build-pypi',
dest='build_pypi',
action='store_false',
help='skip building pypi',
)
parser.add_argument(
'--no-build-docker',
dest='build_docker',
action='store_false',
help='skip building docker images',
)
parser.add_argument(
'--no-upload-docker',
dest='upload_docker',
action='store_false',
help='skip uploading docker images',
)
uploading = parser.add_mutually_exclusive_group()
uploading.add_argument(
'--upload-pypi',
dest='force_upload_pypi',
action='store_true',
help='upload to pypi even if building is disabled'
)
uploading.add_argument(
'--no-upload-pypi',
dest='no_upload_pypi',
action='store_true',
help='skip uploading to pypi',
)
parser.add_argument(
'--no-upload',
dest='test_upload',
action='store_false',
help='Skip uploading to pypitest',
)
parser.add_argument(
'--no-build-homebrew',
dest='build_homebrew',
action='store_false',
help='Skip building homebrew packages',
)
parser.add_argument(
'--no-write-requirements',
dest='write_requirements',
action='store_false',
help='Skip writing the requirements file. It must exist.'
)
parser.add_argument(
'--no-write-dockerfile',
dest='write_dockerfile',
action='store_false',
help='Skip writing the dockerfile. It must exist.'
)
parsed = parser.parse_args()
upload_pypi = parsed.build_pypi
if parsed.force_upload_pypi:
upload_pypi = True
elif parsed.no_upload_pypi:
upload_pypi = False
return cls(
version=parsed.version,
part=parsed.part,
path=parsed.path,
homebrew_path=parsed.homebrew_path,
homebrew_set_default=parsed.homebrew_set_default,
set_version=parsed.set_version,
build_pypi=parsed.build_pypi,
upload_pypi=upload_pypi,
test_upload=parsed.test_upload,
build_homebrew=parsed.build_homebrew,
build_docker=parsed.build_docker,
upload_docker=parsed.upload_docker,
write_requirements=parsed.write_requirements,
write_dockerfile=parsed.write_dockerfile,
)
def collect_output(cmd, cwd=None, stderr=subprocess.PIPE) -> str:
try:
result = subprocess.run(
cmd, cwd=cwd, check=True, stdout=subprocess.PIPE, stderr=stderr
)
except subprocess.CalledProcessError as exc:
print(f'Command {exc.cmd} failed')
if exc.output:
print(exc.output.decode('utf-8'))
if exc.stderr:
print(exc.stderr.decode('utf-8'), file=sys.stderr)
raise
return result.stdout.decode('utf-8')
def run_command(cmd, cwd=None) -> None:
result = collect_output(cmd, stderr=subprocess.STDOUT, cwd=cwd)
print(result)
def set_version(path: Path, version: Version, part: str):
# bumpversion --commit --no-tag --new-version "${version}" "${port}"
cmd = [
'bumpversion', '--commit', '--no-tag', '--new-version',
str(version), part
]
print(f'bumping version to {version}')
run_command(cmd, cwd=path)
print(f'bumped version to {version}')
class PypiBuilder:
_SUBPACKAGES = (
'core',
'plugins/postgres',
'plugins/redshift',
'plugins/bigquery',
'plugins/snowflake',
)
def __init__(self, dbt_path: Path):
self.dbt_path = dbt_path
@staticmethod
def _dist_for(path: Path, make=False) -> Path:
dist_path = path / 'dist'
if dist_path.exists():
shutil.rmtree(dist_path)
if make:
os.makedirs(dist_path)
build_path = path / 'build'
if build_path.exists():
shutil.rmtree(build_path)
return dist_path
@staticmethod
def _build_pypi_package(path: Path):
print(f'building package in {path}')
cmd = ['python', 'setup.py', 'sdist', 'bdist_wheel']
run_command(cmd, cwd=path)
print(f'finished building package in {path}')
@staticmethod
def _all_packages_in(path: Path) -> Iterator[Path]:
path = path / 'dist'
for pattern in ('*.tar.gz', '*.whl'):
yield from path.glob(pattern)
def _build_subpackage(self, name: str) -> Iterator[Path]:
subpath = self.dbt_path / name
self._dist_for(subpath)
self._build_pypi_package(subpath)
return self._all_packages_in(subpath)
def build(self):
print('building pypi packages')
dist_path = self._dist_for(self.dbt_path)
sub_pkgs: List[Path] = []
for path in self._SUBPACKAGES:
sub_pkgs.extend(self._build_subpackage(path))
# now build the main package
self._build_pypi_package(self.dbt_path)
# now copy everything from the subpackages in
for package in sub_pkgs:
shutil.copy(str(package), dist_path)
print('built pypi packages')
def upload(self, *, test=True):
cmd = ['twine', 'check']
cmd.extend(str(p) for p in self._all_packages_in(self.dbt_path))
run_command(cmd)
cmd = ['twine', 'upload']
if test:
cmd.extend(['--repository', 'pypitest'])
cmd.extend(str(p) for p in self._all_packages_in(self.dbt_path))
print('uploading packages: {}'.format(' '.join(cmd)))
run_command(cmd)
print('uploaded packages')
class PipInstaller(venv.EnvBuilder):
def __init__(self, packages: List[str]) -> None:
super().__init__(with_pip=True)
self.packages = packages
def post_setup(self, context):
# we can't run from the dbt directory or this gets all weird, so
# install from an empty temp directory and then remove it.
tmp = tempfile.mkdtemp()
cmd = [context.env_exe, '-m', 'pip', 'install', '--upgrade']
cmd.extend(self.packages)
print(f'installing {self.packages}')
try:
run_command(cmd, cwd=tmp)
finally:
os.rmdir(tmp)
print(f'finished installing {self.packages}')
def create(self, venv_path):
os.makedirs(venv_path.parent, exist_ok=True)
if venv_path.exists():
shutil.rmtree(venv_path)
return super().create(venv_path)
def _require_wheels(dbt_path: Path) -> List[Path]:
dist_path = dbt_path / 'dist'
wheels = list(dist_path.glob('*.whl'))
if not wheels:
raise ValueError(
f'No wheels found in {dist_path} - run scripts/build-wheels.sh'
)
return wheels
class DistFolderEnv(PipInstaller):
def __init__(self, dbt_path: Path) -> None:
self.wheels = _require_wheels(dbt_path)
super().__init__(packages=self.wheels)
class HomebrewVirtualenv(PipInstaller):
def __init__(self, dbt_version: Version) -> None:
super().__init__([f'dbt=={dbt_version}'])
@dataclass
class HomebrewDependency:
name: str
url: str
sha256: str
version: str
def render(self, indent: int = 2) -> str:
result = textwrap.dedent(f'''\
resource "{self.name}" do # {self.name}=={self.version}
url "{self.url}"
sha256 "{self.sha256}"
end
''')
return textwrap.indent(result, ' '*indent)
def __str__(self) -> str:
return self.render(indent=0)
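
    # render() produces a Homebrew resource block of the form (values
    # illustrative):
    #   resource "agate" do # agate==1.6.1
    #     url "https://files.pythonhosted.org/..."
    #     sha256 "..."
    #   end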
@dataclass
class HomebrewTemplate:
url_data: str
hash_data: str
dependencies: List[HomebrewDependency]
def _make_venv_at(root: Path, name: str, builder: venv.EnvBuilder):
venv_path = root / name
os.makedirs(root, exist_ok=True)
if venv_path.exists():
shutil.rmtree(venv_path)
builder.create(venv_path)
return venv_path
class HomebrewBuilder:
def __init__(
self,
dbt_path: Path,
version: Version,
homebrew_path: Path,
set_default: bool,
) -> None:
self.dbt_path = dbt_path
self.version = version
self.homebrew_path = homebrew_path
self.set_default = set_default
self._template: Optional[HomebrewTemplate] = None
def make_venv(self) -> HomebrewVirtualenv:
env = HomebrewVirtualenv(self.version)
max_attempts = 10
for attempt in range(1, max_attempts+1):
# after uploading to pypi, it can take a few minutes for installing
# to work. Retry a few times...
try:
env.create(self.homebrew_venv_path)
                return env
except subprocess.CalledProcessError:
if attempt == max_attempts:
raise
else:
print(
f'installation failed - waiting 60s for pypi to see '
f'the new version (attempt {attempt}/{max_attempts})'
)
time.sleep(60)
@property
def versioned_formula_path(self) -> Path:
return (
self.homebrew_path / 'Formula' / self.version.homebrew_filename()
)
@property
def default_formula_path(self) -> Path:
return (
self.homebrew_path / 'Formula/dbt.rb'
)
@property
def homebrew_venv_path(self) -> Path:
return self.dbt_path / 'build' / 'homebrew-venv'
@staticmethod
def _dbt_homebrew_formula_fmt() -> str:
return textwrap.dedent('''\
class {formula_name} < Formula
include Language::Python::Virtualenv
desc "Data build tool"
homepage "https://github.com/fishtown-analytics/dbt"
url "{url_data}"
sha256 "{hash_data}"
revision 1
bottle do
root_url "http://bottles.getdbt.com"
# bottle hashes + versions go here
end
depends_on "[email protected]"
depends_on "postgresql"
depends_on "python"
{dependencies}
{trailer}
end
''')
@staticmethod
def _dbt_homebrew_trailer() -> str:
dedented = textwrap.dedent('''\
def install
venv = virtualenv_create(libexec, "python3")
res = resources.map(&:name).to_set
res.each do |r|
venv.pip_install resource(r)
end
venv.pip_install_and_link buildpath
bin.install_symlink "#{libexec}/bin/dbt" => "dbt"
end
test do
(testpath/"dbt_project.yml").write(
"{name: 'test', version: '0.0.1', profile: 'default'}",
)
(testpath/".dbt/profiles.yml").write(
"{default: {outputs: {default: {type: 'postgres', threads: 1,
host: 'localhost', port: 5432, user: 'root', pass: 'password',
dbname: 'test', schema: 'test'}}, target: 'default'}}",
)
(testpath/"models/test.sql").write("select * from test")
system "#{bin}/dbt", "test"
end''')
return textwrap.indent(dedented, ' ')
def get_formula_data(
self, versioned: bool = True
) -> str:
fmt = self._dbt_homebrew_formula_fmt()
trailer = self._dbt_homebrew_trailer()
if versioned:
formula_name = self.version.homebrew_class_name()
else:
formula_name = 'Dbt'
dependencies_str = '\n'.join(
d.render() for d in self.template.dependencies
)
return fmt.format(
formula_name=formula_name,
version=self.version,
url_data=self.template.url_data,
hash_data=self.template.hash_data,
dependencies=dependencies_str,
trailer=trailer,
)
@property
def template(self) -> HomebrewTemplate:
if self._template is None:
self.make_venv()
print('done setting up virtualenv')
dependencies = []
dbt_package = None
for pkg in self._get_packages():
if pkg.name == 'dbt':
if pkg.version != str(self.version):
raise ValueError(
f'Found an invalid dbt=={pkg.version}, '
f'expected dbt=={self.version}'
)
dbt_package = pkg
else:
# we can assume that anything starting with dbt- in a fresh
# venv is a dbt package, I hope
if pkg.name.startswith('dbt-'):
if pkg.version != str(self.version):
raise ValueError(
f'Found an invalid {pkg.name}=={pkg.version}, '
f'expected {pkg.name}=={self.version}'
)
dependencies.append(pkg)
if dbt_package is None:
raise RuntimeError(
'never found dbt in "pip freeze -l" output'
)
template = HomebrewTemplate(
url_data=dbt_package.url,
hash_data=dbt_package.sha256,
dependencies=dependencies,
)
self._template = template
else:
template = self._template
return template
def _get_pypi_info(self, pkg: str, version: str) -> Tuple[str, str]:
fp = urlopen(f'https://pypi.org/pypi/{pkg}/{version}/json')
try:
data = json.load(fp)
finally:
fp.close()
assert 'urls' in data
for pkginfo in data['urls']:
assert 'packagetype' in pkginfo
if pkginfo['packagetype'] == 'sdist':
assert 'url' in pkginfo
assert 'digests' in pkginfo
assert 'sha256' in pkginfo['digests']
url = pkginfo['url']
sha256 = pkginfo['digests']['sha256']
return url, sha256
raise ValueError(f'Never got a valid sdist for {pkg}=={version}')
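
    # The slice of the PyPI JSON payload relied on above (shape per
    # https://pypi.org/pypi/<pkg>/<version>/json) looks like:
    #   {"urls": [{"packagetype": "sdist", "url": "...",
    #              "digests": {"sha256": "..."}}]}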
def _get_packages(self) -> Iterator[HomebrewDependency]:
pip = self.homebrew_venv_path / 'bin/pip'
cmd = [pip, 'freeze', '-l']
raw = collect_output(cmd).split('\n')
for line in raw:
if not line:
continue
parts = line.split('==')
if len(parts) != 2:
raise ValueError(
f'Could not parse pip freeze output line: {line}'
)
name, version = parts
url, sha256 = self._get_pypi_info(name, version)
dep = HomebrewDependency(
name=name, url=url, sha256=sha256, version=version
)
yield dep
def _remove_dbt_resource(self, lines: List[str]) -> Iterator[str]:
# TODO: fork poet or extract the good bits to avoid this
line_iter = iter(lines)
# don't do a double-newline or "brew audit" gets mad
for line in line_iter:
# skip the contents of the "dbt" resource block.
if line.strip() == 'resource "dbt" do':
for skip in line_iter:
if skip.strip() == 'end':
# skip the newline after 'end'
next(line_iter)
break
else:
yield line
def create_versioned_formula_file(self):
formula_contents = self.get_formula_data(versioned=True)
if self.versioned_formula_path.exists():
print('Homebrew formula path already exists, overwriting')
self.versioned_formula_path.write_text(formula_contents)
def commit_versioned_formula(self):
# add a commit for the new formula
run_command(
['git', 'add', self.versioned_formula_path],
cwd=self.homebrew_path
)
run_command(
['git', 'commit', '-m', f'add dbt@{self.version}'],
cwd=self.homebrew_path
)
def commit_default_formula(self):
run_command(
['git', 'add', self.default_formula_path],
cwd=self.homebrew_path
)
run_command(
['git', 'commit', '-m', f'upgrade dbt to {self.version}'],
cwd=self.homebrew_path
)
@staticmethod
def run_tests(formula_path: Path, audit: bool = True):
path = os.path.normpath(formula_path)
run_command(['brew', 'uninstall', '--force', path])
versions = [
l.strip() for l in
collect_output(['brew', 'list']).split('\n')
if l.strip().startswith('dbt@') or l.strip() == 'dbt'
]
if versions:
run_command(['brew', 'unlink'] + versions)
run_command(['brew', 'install', path])
run_command(['brew', 'test', path])
if audit:
run_command(['brew', 'audit', '--strict', path])
def create_default_package(self):
os.remove(self.default_formula_path)
formula_contents = self.get_formula_data(versioned=False)
self.default_formula_path.write_text(formula_contents)
def build(self):
self.create_versioned_formula_file()
# self.run_tests(formula_path=self.versioned_formula_path)
self.commit_versioned_formula()
if self.set_default:
self.create_default_package()
# self.run_tests(formula_path=self.default_formula_path, audit=False)
self.commit_default_formula()
class WheelInfo:
def __init__(self, path):
self.path = path
@staticmethod
def _extract_distinfo_path(wfile: zipfile.ZipFile) -> zipfile.Path:
zpath = zipfile.Path(root=wfile)
for path in zpath.iterdir():
if path.name.endswith('.dist-info'):
return path
raise ValueError('Wheel with no dist-info?')
def get_metadata(self) -> Dict[str, str]:
with zipfile.ZipFile(self.path) as wf:
distinfo = self._extract_distinfo_path(wf)
metadata = distinfo / 'METADATA'
metadata_dict: Dict[str, str] = {}
for line in metadata.read_text().split('\n'):
parts = line.split(': ', 1)
if len(parts) == 2:
metadata_dict[parts[0]] = parts[1]
return metadata_dict
def package_name(self) -> str:
metadata = self.get_metadata()
if 'Name' not in metadata:
raise ValueError('Wheel with no name?')
return metadata['Name']
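
    # Example (illustrative): for a wheel named dbt-0.17.0-py3-none-any.whl
    # the METADATA 'Name' field is 'dbt'; DockerBuilder.get_frozen() uses
    # these names to drop dbt's own wheels from the frozen requirements.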
class DockerBuilder:
"""The docker builder requires the existence of a dbt package"""
def __init__(self, dbt_path: Path, version: Version) -> None:
self.dbt_path = dbt_path
self.version = version
@property
def docker_path(self) -> Path:
return self.dbt_path / 'docker'
@property
def dockerfile_name(self) -> str:
return f'Dockerfile.{self.version}'
@property
def dockerfile_path(self) -> Path:
return self.docker_path / self.dockerfile_name
@property
def requirements_path(self) -> Path:
return self.docker_path / 'requirements'
@property
def requirements_file_name(self) -> str:
return f'requirements.{self.version}.txt'
@property
def dockerfile_venv_path(self) -> Path:
return self.dbt_path / 'build' / 'docker-venv'
@property
def requirements_txt_path(self) -> Path:
return self.requirements_path / self.requirements_file_name
def make_venv(self) -> DistFolderEnv:
env = DistFolderEnv(self.dbt_path)
env.create(self.dockerfile_venv_path)
return env
def get_frozen(self) -> str:
env = self.make_venv()
pip_path = self.dockerfile_venv_path / 'bin/pip'
cmd = [pip_path, 'freeze']
wheel_names = {
WheelInfo(wheel_path).package_name() for wheel_path in env.wheels
}
# remove the dependencies in dbt itself
return '\n'.join([
dep for dep in collect_output(cmd).split('\n')
if dep.split('==')[0] not in wheel_names
])
def write_lockfile(self):
freeze = self.get_frozen()
path = self.requirements_txt_path
if path.exists():
raise ValueError(f'Found existing requirements file at {path}!')
os.makedirs(path.parent, exist_ok=True)
path.write_text(freeze)
def get_dockerfile_contents(self):
dist_path = (self.dbt_path / 'dist').relative_to(Path.cwd())
wheel_paths = ' '.join(
os.path.join('.', 'dist', p.name)
for p in _require_wheels(self.dbt_path)
)
requirements_path = self.requirements_txt_path.relative_to(Path.cwd())
return textwrap.dedent(
f'''\
FROM python:3.8.1-slim-buster
RUN apt-get update && \
apt-get dist-upgrade -y && \
apt-get install -y --no-install-recommends \
git software-properties-common make build-essential \
ca-certificates libpq-dev && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
COPY {requirements_path} ./{self.requirements_file_name}
COPY {dist_path} ./dist
RUN pip install --upgrade pip setuptools
RUN pip install --requirement ./{self.requirements_file_name}
RUN pip install {wheel_paths}
RUN useradd -mU dbt_user
ENV PYTHONIOENCODING=utf-8
ENV LANG C.UTF-8
WORKDIR /usr/app
VOLUME /usr/app
USER dbt_user
ENTRYPOINT dbt
'''
)
def write_dockerfile(self):
dockerfile = self.get_dockerfile_contents()
path = self.dockerfile_path
if path.exists():
raise ValueError(f'Found existing docker file at {path}!')
os.makedirs(path.parent, exist_ok=True)
path.write_text(dockerfile)
@property
def image_tag(self):
return f'dbt:{self.version}'
@property
def remote_tag(self):
return f'fishtownanalytics/{self.image_tag}'
def create_docker_image(self):
run_command(
[
'docker', 'build',
'-f', self.dockerfile_path,
'--tag', self.image_tag,
# '--no-cache',
self.dbt_path,
],
cwd=self.dbt_path
)
def set_remote_tag(self):
# tag it
run_command(
['docker', 'tag', self.image_tag, self.remote_tag],
cwd=self.dbt_path,
)
def commit_docker_folder(self):
# commit the contents of docker/
run_command(
['git', 'add', 'docker'],
cwd=self.dbt_path
)
commit_msg = f'Add {self.image_tag} dockerfiles and requirements'
run_command(['git', 'commit', '-m', commit_msg], cwd=self.dbt_path)
def build(
self,
write_requirements: bool = True,
write_dockerfile: bool = True
):
if write_requirements:
self.write_lockfile()
if write_dockerfile:
self.write_dockerfile()
self.commit_docker_folder()
self.create_docker_image()
self.set_remote_tag()
def push(self):
run_command(
['docker', 'push', self.remote_tag]
)
def sanity_check():
if sys.version_info[:len(HOMEBREW_PYTHON)] != HOMEBREW_PYTHON:
python_version_str = '.'.join(str(i) for i in HOMEBREW_PYTHON)
print(f'This script must be run with python {python_version_str}')
sys.exit(1)
# avoid "what's a bdist_wheel" errors
try:
import wheel # type: ignore # noqa
except ImportError:
print(
'The wheel package is required to build. Please run:\n'
'pip install -r dev_requirements.txt'
)
sys.exit(1)
def upgrade_to(args: Arguments):
if args.set_version:
set_version(args.path, args.version, args.part)
builder = PypiBuilder(args.path)
if args.build_pypi:
builder.build()
if args.upload_pypi:
if args.test_upload:
builder.upload()
input(
f'Ensure https://test.pypi.org/project/dbt/{args.version}/ '
'exists and looks reasonable'
)
builder.upload(test=False)
if args.build_homebrew:
if args.upload_pypi:
print('waiting a minute for pypi before trying to pip install')
# if we uploaded to pypi, wait a minute before we bother trying to
# pip install
time.sleep(60)
HomebrewBuilder(
dbt_path=args.path,
version=args.version,
homebrew_path=args.homebrew_path,
set_default=args.homebrew_set_default,
).build()
if args.build_docker:
builder = DockerBuilder(
dbt_path=args.path,
version=args.version,
)
builder.build(
write_requirements=args.write_requirements,
write_dockerfile=args.write_dockerfile,
)
if args.upload_docker:
builder.push()
def main():
sanity_check()
args = Arguments.parse()
upgrade_to(args)
if __name__ == '__main__':
main()
| fishtown-analytics/dbt | scripts/build-dbt.py | Python | apache-2.0 | 29,183 |
from contextlib import suppress
from functools import update_wrapper
from weakref import WeakSet
from django.apps import apps
from django.contrib.admin import ModelAdmin, actions
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import NoReverseMatch, reverse
from django.utils.text import capfirst
from django.utils.translation import gettext as _, gettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.i18n import JavaScriptCatalog
all_sites = WeakSet()
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite:
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
# Text to put at the end of each page's <title>.
site_title = gettext_lazy('Django site admin')
# Text to put in each page's <h1>.
site_header = gettext_lazy('Django administration')
# Text to put at the top of the admin index page.
index_title = gettext_lazy('Site administration')
# URL for the "View site" link at the top of each admin page.
site_url = '/'
_empty_value_display = '-'
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
all_sites.add(self)
def check(self, app_configs):
"""
Run the system checks on all ModelAdmins, except if they aren't
customized at all.
"""
if app_configs is None:
app_configs = apps.get_app_configs()
app_configs = set(app_configs) # Speed up lookups below
errors = []
modeladmins = (o for o in self._registry.values() if o.__class__ is not ModelAdmin)
for modeladmin in modeladmins:
if modeladmin.model._meta.app_config in app_configs:
errors.extend(modeladmin.check())
return errors
def register(self, model_or_iterable, admin_class=None, **options):
"""
Register the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, use ModelAdmin (the default admin
options). If keyword arguments are given -- e.g., list_display --
apply them as options to the admin class.
If a model is already registered, raise AlreadyRegistered.
If a model is abstract, raise ImproperlyConfigured.
"""
if not admin_class:
admin_class = ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured(
'The model %s is abstract, so it cannot be registered with admin.' % model.__name__
)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
# Instantiate the admin class to save in the registry
self._registry[model] = admin_class(model, self)
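
    # Illustrative usage, mirroring the docstring above (model names are
    # assumptions):
    #   site = AdminSite(name='custom_admin')
    #   site.register(Author)
    #   site.register(Book, list_display=('title',))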
def unregister(self, model_or_iterable):
"""
Unregister the given model(s).
If a model isn't already registered, raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def is_registered(self, model):
"""
Check if a model class is registered with this `AdminSite`.
"""
return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raise KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raise KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return iter(self._actions.items())
@property
def empty_value_display(self):
return self._empty_value_display
@empty_value_display.setter
def empty_value_display(self, empty_value_display):
self._empty_value_display = empty_value_display
def has_permission(self, request):
"""
Return True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super().get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request):
if request.path == reverse('admin:logout', current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
# Inner import to prevent django.contrib.admin (app) from
# importing django.contrib.auth.models.User (unrelated model).
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
request.get_full_path(),
reverse('admin:login', current_app=self.name)
)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import url, include
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.contenttypes.views imports ContentType.
from django.contrib.contenttypes import views as contenttype_views
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
url(r'^$', wrap(self.index), name='index'),
url(r'^login/$', self.login, name='login'),
url(r'^logout/$', wrap(self.logout), name='logout'),
url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'),
url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut),
name='view_on_site'),
]
# Add in each model's views, and create a list of valid URLS for the
# app_index
valid_app_labels = []
for model, model_admin in self._registry.items():
urlpatterns += [
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)),
]
if model._meta.app_label not in valid_app_labels:
valid_app_labels.append(model._meta.app_label)
# If there were ModelAdmins registered, we should have a list of app
# labels for which we need to allow access to the app_index view,
if valid_app_labels:
regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$'
urlpatterns += [
url(regex, wrap(self.app_index), name='app_list'),
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), 'admin', self.name
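    # Usage sketch (an illustrative project URLconf, not part of this module):
    # the (urlpatterns, app_namespace, instance_namespace) 3-tuple returned
    # above is what a project mounts, e.g.
    #
    #     from django.conf.urls import url
    #     from django.contrib import admin
    #     urlpatterns = [url(r'^admin/', admin.site.urls)]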
def each_context(self, request):
"""
Return a dictionary of variables to put in the template context for
*every* page in the admin site.
For sites running on a subpath, use the SCRIPT_NAME value if site_url
hasn't been customized.
"""
script_name = request.META['SCRIPT_NAME']
site_url = script_name if self.site_url == '/' and script_name else self.site_url
return {
'site_title': self.site_title,
'site_header': self.site_header,
'site_url': site_url,
'has_permission': self.has_permission(request),
'available_apps': self.get_app_list(request),
}
def password_change(self, request, extra_context=None):
"""
Handle the "change password" task -- both form display and validation.
"""
from django.contrib.admin.forms import AdminPasswordChangeForm
from django.contrib.auth.views import PasswordChangeView
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'form_class': AdminPasswordChangeForm,
'success_url': url,
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
request.current_app = self.name
return PasswordChangeView.as_view(**defaults)(request)
def password_change_done(self, request, extra_context=None):
"""
Display the "success" page after a password change.
"""
from django.contrib.auth.views import PasswordChangeDoneView
defaults = {
'extra_context': dict(self.each_context(request), **(extra_context or {})),
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
request.current_app = self.name
return PasswordChangeDoneView.as_view(**defaults)(request)
def i18n_javascript(self, request, extra_context=None):
"""
Display the i18n JavaScript that the Django admin requires.
`extra_context` is unused but present for consistency with the other
admin views.
"""
return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request)
@never_cache
def logout(self, request, extra_context=None):
"""
Log out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import LogoutView
defaults = {
'extra_context': dict(
self.each_context(request),
# Since the user isn't logged out at this point, the value of
# has_permission must be overridden.
has_permission=False,
**(extra_context or {})
),
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
request.current_app = self.name
return LogoutView.as_view(**defaults)(request)
@never_cache
def login(self, request, extra_context=None):
"""
Display the login form for the given HttpRequest.
"""
if request.method == 'GET' and self.has_permission(request):
# Already logged-in, redirect to admin index
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
from django.contrib.auth.views import LoginView
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.admin.forms eventually imports User.
from django.contrib.admin.forms import AdminAuthenticationForm
context = dict(
self.each_context(request),
title=_('Log in'),
app_path=request.get_full_path(),
username=request.user.get_username(),
)
if (REDIRECT_FIELD_NAME not in request.GET and
REDIRECT_FIELD_NAME not in request.POST):
context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name)
context.update(extra_context or {})
defaults = {
'extra_context': context,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
request.current_app = self.name
return LoginView.as_view(**defaults)(request)
def _build_app_dict(self, request, label=None):
"""
Build the app dictionary. The optional `label` parameter filters models
of a specific app.
"""
app_dict = {}
if label:
models = {
m: m_a for m, m_a in self._registry.items()
if m._meta.app_label == label
}
else:
models = self._registry
for model, model_admin in models.items():
app_label = model._meta.app_label
has_module_perms = model_admin.has_module_permission(request)
if not has_module_perms:
continue
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True not in perms.values():
continue
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change'):
with suppress(NoReverseMatch):
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
if perms.get('add'):
with suppress(NoReverseMatch):
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': apps.get_app_config(app_label).verbose_name,
'app_label': app_label,
'app_url': reverse(
'admin:app_list',
kwargs={'app_label': app_label},
current_app=self.name,
),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if label:
return app_dict.get(label)
return app_dict
def get_app_list(self, request):
"""
Return a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
return app_list
@never_cache
def index(self, request, extra_context=None):
"""
Display the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_list = self.get_app_list(request)
context = dict(
self.each_context(request),
title=self.index_title,
app_list=app_list,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.index_template or 'admin/index.html', context)
def app_index(self, request, app_label, extra_context=None):
app_dict = self._build_app_dict(request, app_label)
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
app_name = apps.get_app_config(app_label).verbose_name
context = dict(
self.each_context(request),
title=_('%(app)s administration') % {'app': app_name},
app_list=[app_dict],
app_label=app_label,
)
context.update(extra_context or {})
request.current_app = self.name
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
| tysonclugg/django | django/contrib/admin/sites.py | Python | bsd-3-clause | 19,895 |
__author__ = 'dmorina'
from rest_framework import status, viewsets
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework import mixins
from crowdsourcing.models import Conversation, Message
from crowdsourcing.serializers.message import ConversationSerializer, MessageSerializer
class ConversationViewSet(viewsets.ModelViewSet):
queryset = Conversation.objects.all()
serializer_class = ConversationSerializer
    permission_classes = [IsAuthenticated]
def create(self, request, *args, **kwargs):
serializer = ConversationSerializer(data=request.data)
if serializer.is_valid():
serializer.create(sender=request.user)
return Response({'status': 'Conversation created'})
else:
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST)
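# Router wiring sketch for the viewsets in this module (illustrative; the
# project's actual URLconf may differ):
#
#     from rest_framework.routers import DefaultRouter
#     router = DefaultRouter()
#     router.register(r'conversations', ConversationViewSet)
#     router.register(r'messages', MessageViewSet)
#     # then: urlpatterns = router.urls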
class MessageViewSet(viewsets.ModelViewSet):
queryset = Message.objects.all()
serializer_class = MessageSerializer
    permission_classes = [IsAuthenticated]
def create(self, request, *args, **kwargs):
serializer = MessageSerializer(data=request.data)
if serializer.is_valid():
serializer.create(sender=request.user)
return Response({'status': 'Message sent'})
else:
return Response(serializer.errors,
status=status.HTTP_400_BAD_REQUEST) | radhikabhanu/crowdsource-platform | crowdsourcing/viewsets/message.py | Python | mit | 1,449 |
__author__ = 'mmoisen'
import peewee
#MYSQL_USERNAME='username'
#MYSQL_DATABASE='database'
#MYSQL_PASSWORD='password'
#MYSQL_HOST='host'
'''
todo:
change this to a database proxy to use different dbs
'''
BREW_PROPERTIES_FILE = "brew.properties"
hostnames = ['raspberrypi','raspberrypi1']
try:
from local_settings import *
except ImportError:
print "Create a 'local_settings.py' file (or edit settings.py) containing the following constants:\nMYSQL_USERNAME=''\nMYSQL_HOST=''\nMYSQL_PASSWORD=''\nMYSQL_DATABASE=''\n"
db = peewee.MySQLDatabase(MYSQL_DATABASE,user=MYSQL_USERNAME, host=MYSQL_HOST,passwd=MYSQL_PASSWORD)
def get_db():
db.connect()
db.get_conn().ping(True)
return db
class BaseModel(peewee.Model):
class Meta:
database = db
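# Sketch of the database-proxy approach mentioned in the todo docstring above,
# using peewee's Proxy for deferred initialization (illustrative, untested):
#
#     db_proxy = peewee.Proxy()
#
#     class BaseModel(peewee.Model):
#         class Meta:
#             database = db_proxy
#
#     # once the concrete backend is known:
#     db_proxy.initialize(peewee.MySQLDatabase(
#         MYSQL_DATABASE, user=MYSQL_USERNAME, host=MYSQL_HOST, passwd=MYSQL_PASSWORD))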
| mkmoisen/brew | settings.py | Python | mit | 777 |
#!/usr/bin/env python
import unittest
from pycoin.serialize import h2b
from pycoin.intbytes import int_to_bytes, bytes_from_ints
from pycoin.tx.script.tools import bin_script, compile, disassemble, int_to_script_bytes, int_from_script_bytes
from pycoin.tx.script.opcodes import OPCODE_LIST
from pycoin.tx.script.vm import eval_script
class ToolsTest(unittest.TestCase):
def test_bin_script(self):
def test_bytes(as_bytes):
script = bin_script([as_bytes])
stack = []
eval_script(script, None, lock_time=0, stack=stack, disallow_long_scripts=False)
assert len(stack) == 1
assert stack[0] == as_bytes
def test_val(n):
as_bytes = int_to_bytes(n)
test_bytes(as_bytes)
for i in range(100):
test_val(100)
for i in range(0xfff0, 0x10004):
test_val(i)
for i in range(0xfffff0, 0x1000005):
test_val(i)
for l in (1, 2, 3, 254, 255, 256, 257, 258, 0xfff9, 0xfffe, 0xffff, 0x10000, 0x10001, 0x10005):
for v in (1, 2, 3, 4, 15, 16, 17, 18):
b = bytes_from_ints([v] * l)
test_bytes(b)
b = bytes_from_ints([30] * (0x1000000+1))
for l in (0x1000000-1, 0x1000000, 0x1000000+1):
test_bytes(b[:l])
def test_compile_decompile(self):
def check(s):
b1 = compile(s)
s1 = disassemble(b1)
b2 = compile(s1)
self.assertEqual(s, s1)
self.assertEqual(b1, b2)
def build_hex(size, a, b):
"build some random-looking hex"
return "[%s]" % "".join("%02x" % (((i+a)*b) & 0xff) for i in range(size))
scripts = []
check("[ff]")
check("[ff03]")
check("[ff030102]")
check("[55aabbccddeeff112131]")
long_hex_260 = build_hex(260, 13, 93)
long_hex_270 = build_hex(270, 11, 47)
check("%s %s" % (long_hex_260, long_hex_270))
for opcode, code in OPCODE_LIST:
if opcode.startswith("OP_PUSHDATA"):
# these disassemble differently
continue
check(opcode)
def test_tx_7e0114e93f903892b4dff5526a8cab674b2825fd715c4a95f852a1aed634a0f6(self):
# this tx is from testnet. We add an extra "OP_0" to the end
# we need to check that the script is being disassembled correctly
script = h2b("0047304402201f994ca49451bc764fd090f31adb2fa4381b91f967dc05a6f538d4d1baaa83cd022"
"06ef3ad06de7890bc4130b4f57401412ca94897ea19b646f794a4472375351c1f0147304402201f"
"994ca49451bc764fd090f31adb2fa4381b91f967dc05a6f538d4d1baaa83cd02204655e9eccac41"
"2407dfc3e5753a0f2ac605e41c7eb91630dc67137f2d8081c3a014d0b0152410479be667ef9dcbb"
"ac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e110"
"8a8fd17b448a68554199c47d08ffb10d4b84104c6047f9441ed7d6d3045406e95c07cd85c778e4b"
"8cef3ca7abac09b95c709ee51ae168fea63dc339a3c58419466ceaeef7f632653266d0e1236431a"
"950cfe52a4104f9308a019258c31049344f85f89d5229b531c845836f99b08601f113bce036f938"
"8f7b0f632de8140fe337e62a37f3566500a99934c2231b6cb9fd7584b8e6724104e493dbf1c10d8"
"0f3581e4904930b1404cc6c13900ee0758474fa94abe8c4cd1351ed993ea0d455b75642e2098ea5"
"1448d967ae33bfbdfe40cfe97bdc4773992254ae00")
d1 = disassemble(script).split()
self.assertEqual(len(d1), 5)
self.assertEqual(d1[-1], "OP_0")
def test_int_to_from_script_bytes(self):
for i in range(-127, 127):
self.assertEqual(int_from_script_bytes(int_to_script_bytes(i)), i)
for i in range(-1024, 1024, 16):
self.assertEqual(int_from_script_bytes(int_to_script_bytes(i)), i)
for i in range(-1024*1024, 1024*1024, 10000):
self.assertEqual(int_from_script_bytes(int_to_script_bytes(i)), i)
self.assertEqual(int_to_script_bytes(1), b"\1")
self.assertEqual(int_to_script_bytes(127), b"\x7f")
self.assertEqual(int_to_script_bytes(128), b"\x80\x00")
if __name__ == "__main__":
unittest.main()
| moocowmoo/pycoin | tests/tools_test.py | Python | mit | 4,283 |
import threading
import queue
from socket import *
import sys
import select
import signal
class Server():
def __init__(self):
self.IP = '127.0.0.1'
self.port = 21000
self.address = (self.IP, self.port)
self.socket = socket(AF_INET, SOCK_STREAM)
self.socket.setblocking(0)
self.socket.bind(self.address)
self.inputs = [ self.socket ]
self.outputs = []
self.output_buffer = []
self.clients = {}
#Register Ctrl-C Sighandler
signal.signal(signal.SIGINT, self.signal_handler)
#Listen for two client connections
try:
self.socket.listen(2)
except:
            print('Error: failed to listen on socket')
self.client_message_queues = {}
self.thread_continue = True
self.output_thread = threading.Thread(target=self.output)
#self.output_thread.start()
self.wait_for_input()
def server_shutdown(self):
message = ['c', 'SERVER_OFFLINE']
for client in self.clients:
self.clients[client].send(str(message).encode())
self.cleanup()
def cleanup(self):
self.thread_continue = False
self.socket.close()
if self.output_thread.is_alive():
self.output_thread.join()
if self.output_thread.is_alive():
print('ERROR: thread still alive')
else:
sys.exit()
def signal_handler(self, signum, frame):
print("\nSignal->", signum, frame)
#Send server shutdown command
self.server_shutdown()
def output(self):
while self.thread_continue:
try:
output_message = input()
print("Server message:", output_message)
self.process_server_message(output_message)
except KeyboardInterrupt:
self.server_shutdown()
def process_server_message(self, message):
pass
def wait_for_input(self):
print('Listening...')
while self.inputs:
#Wait for sockets to be ready for processing
readable, writable, exceptional = select.select(self.inputs,
self.outputs,
self.inputs, 0)
#Handle inputs
for s in readable:
if s is self.socket:
connection, client_address = s.accept()
connection.setblocking(0)
self.inputs.append(connection)
print("Client", connection.getpeername(),
"connected")
#Append connection to clients
if len(self.clients) == 0:
self.clients['a'] = connection
message = ['c', 'PEER_NOT_READY']
connection.send(str(message).encode())
else:
self.clients['b'] = connection
message = ['c', 'PEER_READY']
#Send peer ready command to both clients
for client in self.clients:
self.clients[client].send(str(message).encode())
#Give client a message queue
self.client_message_queues[connection] = queue.Queue()
else:
try:
data = s.recv(1024)
except:
message = ['c', 'PEER_DISCONNECTED']
for client in self.clients:
if s == self.clients[client]:
print("Client ", client,
" disconnected")
if client == 'a':
if 'b' in self.clients:
self.clients['b'].send(str(message).encode())
print("Sent:", str(message))
else:
if 'a' in self.clients:
self.clients['a'].send(str(message).encode())
print("Sent:", str(message))
break
#Remove connection from outputs and inputs
if s in self.outputs:
self.outputs.remove(s)
self.inputs.remove(s)
#Remove connection from readable and writable
if s in writable:
writable.remove(s)
readable.remove(s)
#Close and remove connection from client list
if 'a' in self.clients.keys():
if s == self.clients['a']:
self.clients['a'].close()
del self.clients['a']
if 'b' in self.clients.keys():
if s == self.clients['b']:
self.clients['b'].close()
del self.clients['b']
if data:
receiver = None
if 'a' in self.clients.keys():
if s == self.clients['a']:
if 'b' in self.clients.keys():
receiver = self.clients['b']
client_id = 'a'
if 'b' in self.clients.keys():
if s == self.clients['b']:
if 'a' in self.clients.keys():
receiver = self.clients['a']
client_id = 'b'
print("Received data from client ",
client_id, ":", data)
message = ['m', data.decode()]
if receiver != None:
self.client_message_queues[receiver].put(str(message).encode())
if receiver not in self.outputs:
#print('Appended ', s.getpeername())
self.outputs.append(receiver)
else:
pass
#Handle outputs
for s in writable:
try:
next_msg = self.client_message_queues[s].get_nowait()
except queue.Empty:
pass
else:
if s not in self.clients.values():
#Flush next_msg
pass
else:
print("Message sent ", next_msg.decode())
try:
s.send(next_msg)
except:
pass
#Handle exceptional conditions
for s in exceptional:
pass
if __name__ == '__main__':
server = Server()
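# Protocol note (as implemented above): messages are Python-list reprs encoded
# to bytes, e.g. b"['c', 'PEER_READY']" for control messages and
# b"['m', '<text>']" for relayed chat data; clients are expected to parse that
# representation on receipt.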
| twilliams1832/Collab-Messenger | server.py | Python | mit | 7,538 |
#!/usr/bin/env python
"""\
SVG.py - Construct/display SVG scenes.
The following code is a lightweight wrapper around SVG files. The metaphor
is to construct a scene, add objects to it, and then write it to a file
to display it.
ImageMagick can be used to display the generated SVG files. ImageMagick also
does a remarkable job of converting SVG files into other formats.
"""
import os
def colorstr(rgb):
if type(rgb) == tuple:
return "#%02x%02x%02x" % rgb
else:
return rgb
def compute_style(style):
color = style.get("color")
style_str = []
if color is None:
color="none"
style_str.append('fill:%s;' % (colorstr(color),))
style_str = 'style="%s"' % (';'.join(style_str),)
return style_str
class Scene:
def __init__(self, size=(400,400)):
self.items = []
self.size = size
def add(self,item):
self.items.append(item)
def strarray(self):
var = [
"<?xml version=\"1.0\"?>\n",
"<svg height=\"%d\" width=\"%d\" >\n" % (self.size[1],self.size[0]),
" <g style=\"fill-opacity:1.0; stroke:black;\n",
" stroke-width:1;\">\n"
]
for item in self.items: var += item.strarray()
var += [" </g>\n</svg>\n"]
return var
def write_svg(self, file):
file.writelines(self.strarray())
def _repr_html_(self):
return '\n'.join(self.strarray())
class Line:
def __init__(self,start,end):
self.start = start #xy tuple
self.end = end #xy tuple
def strarray(self):
return [" <line x1=\"%d\" y1=\"%d\" x2=\"%d\" y2=\"%d\" />\n" %\
(self.start[0],self.start[1],self.end[0],self.end[1])]
class Circle:
def __init__(self,center,radius, **style_kwargs):
self.center = center
self.radius = radius
self.style_kwargs = style_kwargs
def strarray(self):
style_str = compute_style(self.style_kwargs)
return [
" <circle cx=\"%d\" cy=\"%d\" r=\"%d\"\n" % (self.center[0], self.center[1], self.radius),
" %s />\n" % (style_str,)
]
class Rectangle:
def __init__(self, origin, size, **style_kwargs):
self.origin = origin
self.size = size
self.style_kwargs = style_kwargs
def strarray(self):
style_str = compute_style(self.style_kwargs)
return [
" <rect x=\"%d\" y=\"%d\" height=\"%d\"\n" % (self.origin[0], self.origin[1], self.size[1]),
" width=\"%d\" %s />\n" % (self.size[0], style_str)
]
class Text:
def __init__(self,origin,text,size=24):
self.origin = origin
self.text = text
self.size = size
return
def strarray(self):
return [" <text x=\"%d\" y=\"%d\" font-size=\"%d\">\n" %\
(self.origin[0],self.origin[1],self.size),
" %s\n" % self.text,
" </text>\n"]
def test():
scene = Scene()
scene.add(Rectangle((100,100),(200,200), **{"color":(0,255,255)} ))
scene.add(Line((200,200),(200,300)))
scene.add(Line((200,200),(300,200)))
scene.add(Line((200,200),(100,200)))
scene.add(Line((200,200),(200,100)))
scene.add(Circle((200,200),30, **{"color":(0,0,255)} ))
scene.add(Circle((200,300),30, **{"color":(0,255,0)} ))
scene.add(Circle((300,200),30, **{"color":(255,0,0)} ))
scene.add(Circle((100,200),30, **{"color":(255,255,0)} ))
scene.add(Circle((200,100),30, **{"color":(255,0,255)} ))
scene.add(Text((50,50),"Testing SVG"))
with open("test.svg", "w") as f:
scene.write_svg(f)
if __name__ == '__main__':
test()
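# Jupyter usage sketch (assumes an IPython/Jupyter session): because Scene
# defines _repr_html_, a scene renders inline when it is the last expression
# in a cell, e.g.:
#
#     scene = Scene(size=(200, 200))
#     scene.add(Circle((100, 100), 40, color=(255, 0, 0)))
#     scene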
| meppe/tensorflow-deepq | tf_rl/utils/svg.py | Python | mit | 3,688 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v10.enums.types import (
response_content_type as gage_response_content_type,
)
from google.ads.googleads.v10.resources.types import (
campaign_asset_set as gagr_campaign_asset_set,
)
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v10.services",
marshal="google.ads.googleads.v10",
manifest={
"MutateCampaignAssetSetsRequest",
"CampaignAssetSetOperation",
"MutateCampaignAssetSetsResponse",
"MutateCampaignAssetSetResult",
},
)
class MutateCampaignAssetSetsRequest(proto.Message):
r"""Request message for
[CampaignAssetSetService.MutateCampaignAssetSets][google.ads.googleads.v10.services.CampaignAssetSetService.MutateCampaignAssetSets].
Attributes:
customer_id (str):
Required. The ID of the customer whose
campaign asset sets are being modified.
operations (Sequence[google.ads.googleads.v10.services.types.CampaignAssetSetOperation]):
Required. The list of operations to perform
on individual campaign asset sets.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v10.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operations = proto.RepeatedField(
proto.MESSAGE, number=2, message="CampaignAssetSetOperation",
)
partial_failure = proto.Field(proto.BOOL, number=3,)
validate_only = proto.Field(proto.BOOL, number=4,)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class CampaignAssetSetOperation(proto.Message):
r"""A single operation (create, remove) on a campaign asset set.
This message has `oneof`_ fields (mutually exclusive fields).
For each oneof, at most one member field can be set at the same time.
Setting any member of the oneof automatically clears all other
members.
.. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
Attributes:
create (google.ads.googleads.v10.resources.types.CampaignAssetSet):
Create operation: No resource name is
expected for the new campaign asset set.
This field is a member of `oneof`_ ``operation``.
remove (str):
Remove operation: A resource name for the removed campaign
asset set is expected, in this format:
``customers/{customer_id}/campaignAssetSets/{campaign_id}~{asset_set_id}``
This field is a member of `oneof`_ ``operation``.
"""
create = proto.Field(
proto.MESSAGE,
number=1,
oneof="operation",
message=gagr_campaign_asset_set.CampaignAssetSet,
)
remove = proto.Field(proto.STRING, number=2, oneof="operation",)
class MutateCampaignAssetSetsResponse(proto.Message):
r"""Response message for a campaign asset set mutate.
Attributes:
results (Sequence[google.ads.googleads.v10.services.types.MutateCampaignAssetSetResult]):
All results for the mutate.
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
"""
results = proto.RepeatedField(
proto.MESSAGE, number=1, message="MutateCampaignAssetSetResult",
)
partial_failure_error = proto.Field(
proto.MESSAGE, number=2, message=status_pb2.Status,
)
class MutateCampaignAssetSetResult(proto.Message):
r"""The result for the campaign asset set mutate.
Attributes:
resource_name (str):
Returned for successful operations.
campaign_asset_set (google.ads.googleads.v10.resources.types.CampaignAssetSet):
The mutated campaign asset set with only mutable fields
after mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
campaign_asset_set = proto.Field(
proto.MESSAGE,
number=2,
message=gagr_campaign_asset_set.CampaignAssetSet,
)
__all__ = tuple(sorted(__protobuf__.manifest))
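# Construction sketch (field values illustrative): proto-plus messages accept
# keyword arguments, so a remove request could look like
#
#     request = MutateCampaignAssetSetsRequest(
#         customer_id="1234567890",
#         operations=[CampaignAssetSetOperation(
#             remove="customers/1234567890/campaignAssetSets/111~222")],
#     )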
| googleads/google-ads-python | google/ads/googleads/v10/services/types/campaign_asset_set_service.py | Python | apache-2.0 | 5,820 |
__all__ = ['agent', 'jaeger']
| census-instrumentation/opencensus-python | contrib/opencensus-ext-jaeger/opencensus/ext/jaeger/trace_exporter/gen/jaeger/__init__.py | Python | apache-2.0 | 30 |
#!/usr/bin/env python
import switchlight_api
import time
import argparse
parser = argparse.ArgumentParser(prog='switchlight-cli', description='Switchlight CLI Client')
parser.add_argument('server', type=str, help='IP or hostname of the Switchlight server')
parser.add_argument('port', type=str, default='25500', nargs='?', help='Optional port number of Switchlight server')
parser.add_argument('--query', '-q', action='store_true', help='Queries status of Switchlight server')
parser.add_argument('--toggle', '-t', type=str, nargs=1, action='append', help='Toggles the specified switch')
parser.add_argument('--on', '-o', type=str, nargs=1, action='append', help='Turns the specified switch on')
parser.add_argument('--off', '-f', type=str, nargs=1, action='append', help='Turns the specified switch off')
parser.add_argument('--lock', '-l', action='store_true', help='Locks the Switchlight server')
parser.add_argument('--unlock', '-u', type=str, nargs='?', help='Unlocks the Switchlight server using the specified passcode')
parser.add_argument('--set-timer', '-s', type=int, nargs='?', help='Sets a timer, in minutes, which performs the actions specified on command line')
args = vars(parser.parse_args())
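# Example invocations (host, port and switch names are illustrative):
#   switchlight-cli 192.168.1.10 --query
#   switchlight-cli 192.168.1.10 25500 --unlock 1234 --on Lights
#   switchlight-cli 192.168.1.10 --off Lights --set-timer 30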
try:
client = switchlight_api.Client(args['server'], int(args['port']))
while True:
time.sleep(0.1)
client.update()
if client.get_connected(): break
if args['set_timer']:
if args['toggle']:
print("--toggle cannot be used with --set-timer, use --on or --off instead")
quit()
action = {}
if args['on']:
for switch in args['on']: action[switch[0]] = client.get_switch(switch[0]).states[-1]
if args['off']:
for switch in args['off']: action[switch[0]] = client.get_switch(switch[0]).states[0]
client.set_timer(time.time() + args['set_timer'] * 60, action, args['lock'])
print('Timer set.')
elif args['lock']:
client.lock()
elif args['unlock']:
if client.unlock(args['unlock']):
print('Switchlight server unlocked successfully.')
else:
print('Incorrect passcode.')
elif args.get('on') or args.get('off') or args.get('toggle'):
if client.get_locked():
print('Switchlight server is locked, use --unlock [passcode] to unlock.')
quit()
if args['toggle']:
for s in args['toggle']:
switch = client.get_switch(s[0])
if switch.status > 0:
switch.set(switch.states[0])
else:
switch.set(switch.states[-1])
if args['on']:
for s in args['on']:
switch = client.get_switch(s[0])
switch.set(switch.states[-1])
if args['off']:
for s in args['off']:
switch = client.get_switch(s[0])
switch.set(switch.states[0])
time.sleep(0.25)
client.update()
if args['query']:
switches = client.get_switches()
for switch in switches.values():
print(switch.name + ': ' + switch.states[switch.status])
for timer in client.get_timers().values():
print('Timer ' + str(timer.id) + ', ' + time.strftime('%I:%M:%S %p', time.localtime(timer.time)) + ':')
for a in timer.action.items():
print('\tSet ' + a[0] + ' ' + a[1])
if timer.lock: print('\tLock Switchlight')
locked = 'locked' if client.get_locked() else 'unlocked'
print('Switchlight is ' + locked)
except:
client.disconnect()
raise
client.disconnect()
| hunternet93/switchlight | switchlight-cli.py | Python | mit | 3,672 |
"""
Featurization package of the Lung Cancer Action Team toolkit.
"""
from __future__ import absolute_import
# Import registry
from . import registry
# Import featurization modules
from . import body_depth
from . import center
from . import characteristics
from . import region_properties
from . import tracheal_distance
# Import registry featurization functions
from .registry import featurize_scan, featurize_scan_single
| connorbrinton/lcat | lcat/featurization/__init__.py | Python | gpl-3.0 | 426 |
# CS4243: Computer Vision and Pattern Recognition
# Zhou Bin
# 29th, Oct, 2014
import numpy as np
from Vertex import Vertex
class Polygon:
def __init__(self, newVertexList, newTexelList):
        # Create a list to store all vertices
self.Vertex = []
for i in newVertexList:
self.Vertex.append(i)
        # Create a list to store all texel values
self.Texel = []
for i in newTexelList:
self.Texel.append(i)
| WuPei/cv_reconstructor | Polygon.py | Python | mit | 486 |
# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Persister Module
The Persister reads metrics and alarms from Kafka and then stores them
into either InfluxDB or Cassandra.
Start the persister as a stand-alone process by running 'persister.py
--config-file <config file>'
"""
import multiprocessing
import os
import signal
import sys
import time
from monasca_common.simport import simport
from oslo_config import cfg
from oslo_log import log
from monasca_persister.repositories import persister
LOG = log.getLogger(__name__)
zookeeper_opts = [cfg.StrOpt('uri'),
cfg.IntOpt('partition_interval_recheck_seconds')]
zookeeper_group = cfg.OptGroup(name='zookeeper', title='zookeeper')
cfg.CONF.register_group(zookeeper_group)
cfg.CONF.register_opts(zookeeper_opts, zookeeper_group)
kafka_common_opts = [cfg.StrOpt('uri'),
cfg.StrOpt('group_id'),
cfg.StrOpt('topic'),
cfg.StrOpt('consumer_id'),
cfg.StrOpt('client_id'),
cfg.IntOpt('database_batch_size'),
cfg.IntOpt('max_wait_time_seconds'),
cfg.IntOpt('fetch_size_bytes'),
cfg.IntOpt('buffer_size'),
cfg.IntOpt('max_buffer_size'),
cfg.StrOpt('zookeeper_path'),
cfg.IntOpt('num_processors')]
kafka_metrics_opts = kafka_common_opts
kafka_alarm_history_opts = kafka_common_opts
kafka_metrics_group = cfg.OptGroup(name='kafka_metrics', title='kafka_metrics')
kafka_alarm_history_group = cfg.OptGroup(name='kafka_alarm_history',
title='kafka_alarm_history')
cfg.CONF.register_group(kafka_metrics_group)
cfg.CONF.register_group(kafka_alarm_history_group)
cfg.CONF.register_opts(kafka_metrics_opts, kafka_metrics_group)
cfg.CONF.register_opts(kafka_alarm_history_opts, kafka_alarm_history_group)
repositories_opts = [
cfg.StrOpt('metrics_driver', help='The repository driver to use for metrics'),
cfg.StrOpt('alarm_state_history_driver', help='The repository driver to use for alarm state history')]
repositories_group = cfg.OptGroup(name='repositories', title='repositories')
cfg.CONF.register_group(repositories_group)
cfg.CONF.register_opts(repositories_opts, repositories_group)
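# A matching configuration sketch (driver paths illustrative):
#
#     [repositories]
#     metrics_driver = monasca_persister.repositories.influxdb.metrics_repository:MetricInfluxdbRepository
#     alarm_state_history_driver = monasca_persister.repositories.influxdb.alarm_state_history_repository:AlarmStateHistInfluxdbRepository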
processors = [] # global list to facilitate clean signal handling
exiting = False
def clean_exit(signum, frame=None):
"""Exit all processes attempting to finish uncommitted active work before exit.
    Can be called on an OS signal or on zookeeper losing its connection.
"""
global exiting
if exiting:
# Since this is set up as a handler for SIGCHLD when this kills one
# child it gets another signal, the global exiting avoids this running
# multiple times.
LOG.debug('Exit in progress clean_exit received additional signal %s' % signum)
return
LOG.info('Received signal %s, beginning graceful shutdown.' % signum)
exiting = True
wait_for_exit = False
for process in processors:
try:
if process.is_alive():
                process.terminate()  # Sends SIGTERM, giving the process a chance to shut down cleanly
wait_for_exit = True
except Exception: # nosec
# There is really nothing to do if the kill fails, so just go on.
# The # nosec keeps bandit from reporting this as a security issue
pass
# wait for a couple seconds to give the subprocesses a chance to shut down correctly.
if wait_for_exit:
time.sleep(2)
# Kill everything, that didn't already die
for child in multiprocessing.active_children():
LOG.debug('Killing pid %s' % child.pid)
try:
os.kill(child.pid, signal.SIGKILL)
except Exception: # nosec
# There is really nothing to do if the kill fails, so just go on.
# The # nosec keeps bandit from reporting this as a security issue
pass
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(signum)
def start_process(respository, kafka_config):
LOG.info("start process: {}".format(respository))
m_persister = persister.Persister(kafka_config, cfg.CONF.zookeeper,
respository)
m_persister.run()
def main():
log.register_options(cfg.CONF)
log.set_defaults()
cfg.CONF(sys.argv[1:], project='monasca', prog='persister')
log.setup(cfg.CONF, "monasca-persister")
"""Start persister."""
metric_repository = simport.load(cfg.CONF.repositories.metrics_driver)
alarm_state_history_repository = simport.load(cfg.CONF.repositories.alarm_state_history_driver)
# Add processors for metrics topic
for proc in range(0, cfg.CONF.kafka_metrics.num_processors):
processors.append(multiprocessing.Process(
target=start_process, args=(metric_repository, cfg.CONF.kafka_metrics)))
# Add processors for alarm history topic
for proc in range(0, cfg.CONF.kafka_alarm_history.num_processors):
processors.append(multiprocessing.Process(
target=start_process, args=(alarm_state_history_repository, cfg.CONF.kafka_alarm_history)))
# Start
try:
LOG.info('''
_____
/ \ ____ ____ _____ ______ ____ _____
/ \ / \ / _ \ / \\\__ \ / ___// ___\\\__ \\
/ Y ( <_> ) | \/ __ \_\___ \\ \___ / __ \\_
\____|__ /\____/|___| (____ /____ >\___ >____ /
\/ \/ \/ \/ \/ \/
__________ .__ __
\______ \ ___________ _____|__| _______/ |_ ___________
| ___// __ \_ __ \/ ___/ |/ ___/\ __\/ __ \_ __ \\
| | \ ___/| | \/\___ \| |\___ \ | | \ ___/| | \/
|____| \___ >__| /____ >__/____ > |__| \___ >__|
\/ \/ \/ \/
''')
for process in processors:
process.start()
# The signal handlers must be added after the processes start otherwise
# they run on all processes
signal.signal(signal.SIGCHLD, clean_exit)
signal.signal(signal.SIGINT, clean_exit)
signal.signal(signal.SIGTERM, clean_exit)
while True:
time.sleep(10)
except Exception:
LOG.exception('Error! Exiting.')
clean_exit(signal.SIGKILL)
if __name__ == "__main__":
sys.exit(main())
| sapcc/monasca-persister | monasca_persister/persister.py | Python | apache-2.0 | 7,214 |
import os
from codeink.parchment import pkginfo
def test_get_directories():
cwd = os.path.dirname(__file__)
parent_dir = os.path.dirname(cwd)
pig_path = os.path.join(parent_dir, 'guinea-pig')
correct_value = set([pig_path,
os.path.join(pig_path, 'cage1'),
os.path.join(pig_path, 'cage2')])
# check getting all possible directories with .py extension files
assert correct_value == set(pkginfo.get_directories(pig_path, '.py'))
# check that an empty iterable is returned for an empty directory
cage3_dir = os.path.join(pig_path, 'cage3')
assert set() == set(pkginfo.get_directories(cage3_dir))
def test_is_package():
cwd = os.path.dirname(__file__)
parent_dir = os.path.dirname(cwd)
pig_path = os.path.join(parent_dir, 'guinea-pig')
    # test correct package recognition
assert True == pkginfo.is_package(pig_path)
    # test correct non-package recognition
pig3_dir = os.path.join(pig_path, 'cage3')
assert False == pkginfo.is_package(pig3_dir)
def test_get_modules():
cwd = os.path.dirname(__file__)
parent_dir = os.path.dirname(cwd)
pig_path = os.path.join(parent_dir, 'guinea-pig')
dirs = pkginfo.get_directories(pig_path)
    # test correct module recognition
correct_value = set([os.path.join(pig_path, 'cage1', 'pig1.py'),
os.path.join(pig_path, 'cage2', 'pig2.py'),
os.path.join(pig_path, 'lab_assistant.py')])
assert correct_value == set(pkginfo.get_modules(dirs))
    # test correct non-module recognition
cage3_dir = [os.path.join(pig_path, 'cage3')]
assert set() == set(pkginfo.get_modules(cage3_dir))
def test_filter_modules():
cwd = os.path.dirname(__file__)
parent_dir = os.path.dirname(cwd)
pig_path = os.path.join(parent_dir, 'guinea-pig')
dirs = pkginfo.get_directories(pig_path)
# test not matching module paths
modules = list(pkginfo.get_modules(dirs))
patterns = ['*foo*'] # nothing to filter
filtered_modules = pkginfo.filter_modules(modules, patterns)
assert modules == filtered_modules
# test matching module paths
assert [] == pkginfo.filter_modules(modules, ['*test*'])
def test_find_root_pkg():
cwd = os.path.dirname(__file__)
parent_dir = os.path.dirname(cwd)
pig_path = os.path.join(parent_dir, 'guinea-pig')
pig1_path = os.path.join(pig_path, 'cage1', 'pig1.py')
assert parent_dir == pkginfo.find_root_pkg(pig1_path)
| carocad/CodeInk | tests/test_parchment/test_pkginfo.py | Python | apache-2.0 | 2,514 |
# morgainemoviedb -- a tool to organize your local movies
# Copyright 2010 Marc Egli
#
# This file is part of morgainemoviedb.
#
# morgainemoviedb is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# morgainemoviedb is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with morgainemoviedb. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
MOVIE_BASE_DIR = getattr(settings, 'MDB_MOVIE_BASE_DIR', '/filme')
FOLDER_TYPE_CHOICES = getattr(settings, 'MDB_FOLDER_TYPE_CHOICES',(
(1,'Movies'),
(2,'Series'),
))
FOLDER_SCAN_MODE = getattr(settings, 'MDB_FOLDER_SCAN_MODE',(
(1,'Flat'),
(2,'Recursive'),
(3,'Don\'t add Movies'),
))
SCAN_EXCLUDE_FILES = getattr(settings, 'MDB_EXCLUDE_FILES',(
'^\.',
))
MOVIE_FILE_SUFFIXES = getattr(settings, 'MDB_MOVIE_FILE_SUFFIXES',{
'.avi':'movie',
'.mkv':'movie',
'.mov':'movie',
'.mp3':'sound',
})
POSTER_THUMBSIZE = getattr(settings, 'MDB_POSTER_THUMBSIZE',{
'x':300,
'y':300,
})
SCRAPPER = getattr(settings, 'MDB_SCRAPPER', 'tmdb_scrapper')
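# Each value above can be overridden from the Django project's settings module
# via the corresponding MDB_* name, e.g. (illustrative values):
#
#     MDB_MOVIE_BASE_DIR = '/srv/movies'
#     MDB_POSTER_THUMBSIZE = {'x': 200, 'y': 200}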
| frog32/morgainemoviedb | morgainemoviedb/moviedb/conf/settings.py | Python | agpl-3.0 | 1,523 |
# -*- coding: utf-8 -*-
import pytest
from marshmallow.exceptions import ValidationError, MarshallingError, UnmarshallingError
from marshmallow import fields, Schema
class TestValidationError:
def test_stores_message_in_list(self):
err = ValidationError('foo')
assert err.messages == ['foo']
def test_can_pass_list_of_messages(self):
err = ValidationError(['foo', 'bar'])
assert err.messages == ['foo', 'bar']
def test_stores_dictionaries(self):
messages = {'user': {'email': ['email is invalid']}}
err = ValidationError(messages)
assert err.messages == messages
def test_can_store_field_names(self):
err = ValidationError('invalid email', field_names='email')
assert err.field_names == ['email']
err = ValidationError('invalid email', field_names=['email'])
assert err.field_names == ['email']
def test_str(self):
err = ValidationError('invalid email')
assert str(err) == 'invalid email'
err2 = ValidationError('invalid email', 'email')
assert str(err2) == 'invalid email'
class TestMarshallingError:
def test_deprecated(self):
pytest.deprecated_call(MarshallingError, 'foo')
def test_can_store_field_and_field_name(self):
field_name = 'foo'
field = fields.Str()
err = MarshallingError('something went wrong', fields=[field],
field_names=[field_name])
assert err.fields == [field]
assert err.field_names == [field_name]
def test_can_be_raised_by_custom_field(self):
class MyField(fields.Field):
def _serialize(self, val, attr, obj):
raise MarshallingError('oops')
class MySchema(Schema):
foo = MyField()
s = MySchema()
result = s.dump({'foo': 42})
assert 'foo' in result.errors
assert result.errors['foo'] == ['oops']
class TestUnmarshallingError:
def test_deprecated(self):
pytest.deprecated_call(UnmarshallingError, 'foo')
def test_can_store_field_and_field_name(self):
field_name = 'foo'
field = fields.Str()
err = UnmarshallingError('something went wrong', fields=[field],
field_names=[field_name])
assert err.fields == [field]
assert err.field_names == [field_name]
def test_can_be_raised_by_validator(self):
def validator(val):
raise UnmarshallingError('oops')
class MySchema(Schema):
foo = fields.Field(validate=[validator])
s = MySchema()
result = s.load({'foo': 42})
assert 'foo' in result.errors
assert result.errors['foo'] == ['oops']
| mwstobo/marshmallow | tests/test_exceptions.py | Python | mit | 2,753 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.magic.tools.statistics Provides statistical functions.
# -----------------------------------------------------------------
# Ensure Python 3 functionality
from __future__ import absolute_import, division, print_function
# Import standard modules
import copy
import math
import numpy as np
# Import astronomical modules
from astropy.stats import sigma_clip, sigma_clipped_stats
# Import the relevant PTS classes and modules
from . import general
from ..basics.mask import Mask
# -----------------------------------------------------------------
# Calculate sigma-to-FWHM and FWHM-to-sigma conversion factors
sigma_to_fwhm = (8 * np.log(2))**0.5
fwhm_to_sigma = 1.0 / sigma_to_fwhm
# -----------------------------------------------------------------
def histogram(data, nbins):
"""
    Compute a histogram of the data.
    :param data: the input values
    :param nbins: the number of bins
    :return: the bin centers, lower edges and upper edges, each as a list
"""
# Get the bins
freq, edges = np.histogram(data, nbins)
nbins = len(edges) - 1
# Get the bin centers
centers = []
for i in range(nbins): centers.append(0.5 * (edges[i] + edges[i + 1]))
# Get the bin widths
# widths = []
# for i in range(len(edges) - 1):
# widths.append(edges[i + 1] - edges[i])
# Get the lower limits of the bins
lower = []
for i in range(nbins): lower.append(edges[i])
# Get the upper limits of the bins
upper = []
for i in range(nbins): upper.append(edges[i + 1])
# Return the bin properties
return centers, lower, upper
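# Usage sketch for histogram() (illustrative data):
#
#     import numpy as np
#     centers, lower, upper = histogram(np.random.normal(size=1000), nbins=20)
#     # each bin i spans [lower[i], upper[i]] and is centered on centers[i]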
# -----------------------------------------------------------------
def sigma_clip_mask_list(data, sigma=3.0, mask=None):
"""
This function ...
:param data:
:param sigma:
:param mask:
:return:
"""
masked_list = sigma_clip(data, sigma=sigma, iters=None, copy=False)
new_mask = copy.deepcopy(mask) if mask is not None else [0]*len(data)
for i, masked in enumerate(masked_list.mask):
if masked: new_mask[i] = True
# Return the new or updated mask
return new_mask
# -----------------------------------------------------------------
def sigma_clip_mask(data, sigma_level=3.0, mask=None, niters=None):
"""
This function ...
:param data:
:param sigma_level:
:param mask:
:param niters: None means till convergence is achieved
:return:
"""
# Split the x, y and z values of the data, without the masked values
x_values, y_values, z_values = general.split_xyz(data, mask=mask)
# Sigma-clip z-values that are outliers
masked_z_values = sigma_clip(z_values, sigma=sigma_level, iters=niters, copy=False)
# Copy the mask or create a new one if none was provided
new_mask = copy.deepcopy(mask) if mask is not None else Mask(np.zeros_like(data))
for i, masked in enumerate(masked_z_values.mask):
if masked:
x = x_values[i]
y = y_values[i]
new_mask[y,x] = True
#if not isinstance(new_mask, Mask): print(new_mask, mask)
# Assert the mask is of type 'Mask'
from ..core.mask import Mask as newMask
assert isinstance(new_mask, Mask) or isinstance(new_mask, newMask)
# Return the new or updated mask
return new_mask
# -----------------------------------------------------------------
def sigma_clipped_median(data, sigma=3.0, mask=None):
"""
This function ...
:param data:
:param sigma:
:param mask:
:return:
"""
# Calculate the sigma-clipped mean and median
_, median, _ = sigma_clipped_stats(data, mask=mask, sigma=sigma)
# Return the median value
return median
# -----------------------------------------------------------------
def sigma_clipped_statistics(data, sigma=3.0, mask=None):
"""
This function ...
:param data:
:param sigma:
:param mask:
:return:
"""
# Calculate the sigma-clipped mean and median
mean, median, stddev = sigma_clipped_stats(data, mask=mask, sigma=sigma)
# Return the statistical parameters
return mean, median, stddev
# -----------------------------------------------------------------
def sigma_clip_split(input_list, criterion, sigma=3.0, only_high=False, only_low=False, nans="low"):
"""
This function ...
:param input_list:
:param criterion:
:param sigma:
:param only_high:
:param only_low:
:param nans:
:return:
"""
    # Initialize an empty list for the criterion values
determinants = []
    # Loop over all input items and evaluate the criterion for each
for item in input_list: determinants.append(criterion(item))
    # Use sigma clipping to separate the regular values from the outliers
mask = sigma_clip_mask_list(determinants, sigma=sigma)
# Calculate the mean value of the determinants that are not masked
mean = np.ma.mean(np.ma.masked_array(determinants, mask=mask))
    # Create separate lists for the valid and the invalid items
valid_list = []
invalid_list = []
# Loop over all items in the input list, putting them in either the valid or invalid list
for index, item in enumerate(input_list):
value = criterion(item)
if only_high:
if mask[index] and value > mean: invalid_list.append(item)
else: valid_list.append(item)
elif only_low:
if mask[index] and value < mean: invalid_list.append(item)
else: valid_list.append(item)
else:
if mask[index]: invalid_list.append(item)
else: valid_list.append(item)
# Return the valid and invalid lists
return valid_list, invalid_list
# -----------------------------------------------------------------
def cutoff(values, method, limit):
"""
    Determine a cutoff value for the input values.
    :param values: the input values
    :param method: 'percentage' or 'sigma_clip'
    :param limit: the percentage (0-1) or the sigma level, depending on the method
"""
# Percentage method
if method == "percentage":
# Create a sorted list for the input values
sorted_values = sorted(values)
# Determine the splitting point
split = (1.0-limit) * len(sorted_values)
index = int(round(split))
# Return the corresponding value in the sorted list
return sorted_values[index]
# Sigma-clipping method
elif method == "sigma_clip":
# Perform sigma clipping on the input list
masked_values = sigma_clip(np.array(values), sigma=limit, iters=None, copy=False)
# Calculate the maximum of the masked array
return np.ma.max(masked_values)
    else: raise ValueError("Invalid cutoff method (must be 'percentage' or 'sigma_clip')")
# -----------------------------------------------------------------
def inverse_gaussian(center, sigma, amplitude, y):
"""
This function ...
:param center:
:param sigma:
:param amplitude:
:param y:
:return:
"""
x = np.sqrt( - 2. * sigma**2 * np.log(y * np.sqrt(2. * math.pi * sigma**2) / amplitude)) + center
return x
# -----------------------------------------------------------------
def gaussian(center, sigma, amplitude, x):
"""
This function ...
:param center:
:param sigma:
:param amplitude:
:param x:
:return:
"""
normal = 1. / np.sqrt(2. * math.pi * sigma**2 ) * np.exp( - ( x - center)**2 / (2. * sigma**2))
return normal * amplitude
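# Derivation sketch relating gaussian() and inverse_gaussian() above:
# starting from
#     y = amplitude / sqrt(2*pi*sigma**2) * exp(-(x - center)**2 / (2*sigma**2))
# solving for x (positive branch, as used by inverse_gaussian) gives
#     x = center + sqrt(-2 * sigma**2 * log(y * sqrt(2*pi*sigma**2) / amplitude))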
# -----------------------------------------------------------------
def test_inverse(x, center, sigma, amplitude):
"""
This function ...
:param x:
:param center:
:param sigma:
:param amplitude:
:return:
"""
y = gaussian(center, sigma, amplitude, x)
return inverse_gaussian(center, sigma, amplitude, y)
# -----------------------------------------------------------------
| SKIRT/PTS | magic/tools/statistics.py | Python | agpl-3.0 | 8,006 |
# generated from catkin/cmake/template/order_packages.context.py.in
source_root_dir = "/opt/geofrenzy/src/catkin_ws/src"
whitelisted_packages = "".split(';') if "" != "" else []
blacklisted_packages = "".split(';') if "" != "" else []
underlay_workspaces = "/opt/ros/kinetic".split(';') if "/opt/ros/kinetic" != "" else []
| geofrenzy/utm-mbsb | ros-src/catkin_ws/build/catkin_generated/order_packages.py | Python | apache-2.0 | 323 |
# -*- coding: utf-8 -*-
# © 2016 Eficent Business and IT Consulting Services S.L.
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import models
| factorlibre/stock-logistics-warehouse | stock_account_change_product_valuation/__init__.py | Python | agpl-3.0 | 174 |
#!/usr/bin/python
import os
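# Note: vector length (2**k) times repetition count (2**(30-k)) is constant,
# so every invocation below processes 2**30 elements per kernel.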
for kernel in ['plain', 'blas']:
for k in range(4,25):
cmd = './axpy ' + str(2**k) + ' ' + str(2**(30-k)) + ' ' + kernel
os.system(cmd)
| lothian/S2I2 | core/run_axpy.py | Python | gpl-2.0 | 187 |
from typing import Optional
from common.models.executor import ExecutorInfo
from common.models.resource import ResourceInfoList
from common.models.state import TaskState
from util.config import Config, ConfigField, BaseConfig, IncorrectFieldType, DateTimeField
class TaskReturnCode(BaseConfig):
def __init__(self, retcode: int = None, **kwargs):
super().__init__(**kwargs)
self._retcode = retcode
@property
def retcode(self) -> 'Optional[int]':
return self._retcode
def set_retcode(self, retcode: int):
self._retcode = retcode
def to_json(self):
return self.retcode
def from_json(self, json_doc: int, skip_unknown_fields=False):
if not isinstance(json_doc, int) and json_doc is not None:
raise IncorrectFieldType(
'{}: TaskReturnCode can be constructed only from int - {} passed.'.format(self.path_to_node,
json_doc.__class__.__name__))
self._retcode = json_doc
return self
def verify(self):
assert isinstance(self._retcode, int) or self._retcode is None, \
'{}: Return code should be int or None, but it is {}'.format(self.path_to_node,
self._retcode.__class__.__name__)
class TaskExecutionInfo(Config):
state = TaskState()
retcode = TaskReturnCode()
prep_start_time = DateTimeField()
prep_finish_time = DateTimeField()
prep_msg = ConfigField(type=str, required=False, default=None)
start_time = DateTimeField()
finish_time = DateTimeField()
def start_preparation(self):
self.state.change_state(TaskState.preparing)
self.prep_start_time.set_to_now()
def finish_preparation(self, success: bool, prep_msg: str = 'OK', is_initiated_by_user: bool = False):
new_status = TaskState.prepared if success else TaskState.prepfailed
if is_initiated_by_user:
new_status = TaskState.stopped
self.state.change_state(new_status)
self.prep_msg = prep_msg
self.prep_finish_time.set_to_now()
def start_execution(self):
self.state.change_state(TaskState.running)
self.start_time.set_to_now()
def finish_execution(self, retcode: int, is_initiated_by_user: bool = False):
self.finish_time.set_to_now()
self.retcode.set_retcode(retcode)
if retcode == 0 and not is_initiated_by_user:
self.state.change_state(TaskState.finished)
else:
self.state.change_state(TaskState.stopped if is_initiated_by_user else TaskState.failed)
class TaskStruct(Config):
resources = ResourceInfoList()
executor = ExecutorInfo()
class TaskInfo(Config):
task_id = ConfigField(type=str, required=False, default=None)
structure = TaskStruct()
exec_stats = TaskExecutionInfo()
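# Lifecycle sketch for TaskExecutionInfo (illustrative):
#
#     info = TaskExecutionInfo()
#     info.start_preparation()
#     info.finish_preparation(success=True)
#     info.start_execution()
#     info.finish_execution(retcode=0)   # state -> finished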
| LuckyGeck/dedalus | common/models/task.py | Python | mit | 2,955 |
# -*- coding: utf-8 -*-
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce package contient les objets cherchables du module.
Modules :
familier -- cherchable familier
"""
from . import familier
| stormi/tsunami | src/secondaires/familier/cherchables/__init__.py | Python | bsd-3-clause | 1,695 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'KoolfitMeter.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
| kionetworks/KoolfitMeter | KoolfitMeter/urls.py | Python | apache-2.0 | 281 |
default_app_config = 'voters.apps.VotersConfig'
| psephologic/everyonevoting | everyonevoting/voters/__init__.py | Python | agpl-3.0 | 48 |
"""Undocumented Module"""
__all__ = ['Transitions']
from panda3d.core import *
from direct.gui.DirectGui import *
from direct.interval.LerpInterval import LerpColorScaleInterval, LerpColorInterval, LerpScaleInterval, LerpPosInterval
from direct.interval.MetaInterval import Sequence, Parallel
from direct.interval.FunctionInterval import Func
class Transitions:
# These may be reassigned before the fade or iris transitions are
# actually invoked to change the models that will be used.
IrisModelName = "models/misc/iris"
FadeModelName = "models/misc/fade"
def __init__(self, loader,
model=None,
scale=3.0,
pos=Vec3(0, 0, 0)):
self.transitionIval = None
self.letterboxIval = None
self.iris = None
self.fade = None
self.letterbox = None
self.fadeModel = model
self.imagePos = pos
if model:
self.alphaOff = Vec4(1, 1, 1, 0)
self.alphaOn = Vec4(1, 1, 1, 1)
model.setTransparency(1)
self.lerpFunc = LerpColorScaleInterval
else:
self.alphaOff = Vec4(0, 0, 0, 0)
self.alphaOn = Vec4(0, 0, 0, 1)
self.lerpFunc = LerpColorInterval
self.irisTaskName = "irisTask"
self.fadeTaskName = "fadeTask"
self.letterboxTaskName = "letterboxTask"
def __del__(self):
if self.fadeModel:
self.fadeModel.removeNode()
self.fadeModel = None
##################################################
# Fade
##################################################
# We can set a custom model for the fade before using it for the first time
def setFadeModel(self, model, scale=1.0):
self.fadeModel = model
# We have to change some default parameters for a custom fadeModel
self.alphaOn = Vec4(1, 1, 1, 1)
# Reload fade if its already been created
if self.fade:
self.fade.destroy()
self.fade = None
self.loadFade()
def loadFade(self):
if self.fade is None:
# We create a DirectFrame for the fade polygon, instead of
# simply loading the polygon model and using it directly,
# so that it will also obscure mouse events for objects
# positioned behind it.
self.fade = DirectFrame(
parent = hidden,
guiId = 'fade',
relief = None,
image = self.fadeModel,
image_scale = (4, 2, 2),
state = DGG.NORMAL,
)
if not self.fadeModel:
# No fade model was given, so we make this the fade model.
self.fade["relief"] = DGG.FLAT
self.fade["frameSize"] = (-2, 2, -1, 1)
self.fade["frameColor"] = (0, 0, 0, 1)
self.fade.setTransparency(TransparencyAttrib.MAlpha)
self.fade.setBin('unsorted', 0)
self.fade.setColor(0,0,0,0)
def getFadeInIval(self, t=0.5, finishIval=None):
"""
Returns an interval without starting it. This is particularly useful in
cutscenes, so when the cutsceneIval is escaped out of we can finish the fade immediately
"""
        #self.noTransitions()  # masad: this creates a one-frame pop; is it necessary?
self.loadFade()
transitionIval = Sequence(Func(self.fade.reparentTo, aspect2d, FADE_SORT_INDEX),
Func(self.fade.showThrough), # in case aspect2d is hidden for some reason
self.lerpFunc(self.fade, t,
self.alphaOff,
# self.alphaOn,
),
Func(self.fade.detachNode),
name = self.fadeTaskName,
)
if finishIval:
transitionIval.append(finishIval)
return transitionIval
def getFadeOutIval(self, t=0.5, finishIval=None):
"""
Create a sequence that lerps the color out, then
parents the fade to hidden
"""
self.noTransitions()
self.loadFade()
transitionIval = Sequence(Func(self.fade.reparentTo,aspect2d,FADE_SORT_INDEX),
Func(self.fade.showThrough), # in case aspect2d is hidden for some reason
self.lerpFunc(self.fade, t,
self.alphaOn,
# self.alphaOff,
),
name = self.fadeTaskName,
)
if finishIval:
transitionIval.append(finishIval)
return transitionIval
def fadeIn(self, t=0.5, finishIval=None):
"""
Play a fade in transition over t seconds.
Places a polygon on the aspect2d plane then lerps the color
from black to transparent. When the color lerp is finished, it
parents the fade polygon to hidden.
"""
gsg = base.win.getGsg()
if gsg:
# If we're about to fade in from black, go ahead and
# preload all the textures etc.
base.graphicsEngine.renderFrame()
render.prepareScene(gsg)
render2d.prepareScene(gsg)
if (t == 0):
# Fade in immediately with no lerp
#print "transitiosn: fadeIn 0.0"
self.noTransitions()
self.loadFade()
self.fade.detachNode()
else:
# Create a sequence that lerps the color out, then
# parents the fade to hidden
self.transitionIval = self.getFadeInIval(t, finishIval)
self.transitionIval.start()
def fadeOut(self, t=0.5, finishIval=None):
"""
Play a fade out transition over t seconds.
Places a polygon on the aspect2d plane then lerps the color
from transparent to full black. When the color lerp is finished,
it leaves the fade polygon covering the aspect2d plane until you
fadeIn or call noFade.
"""
if (t == 0):
# Fade out immediately with no lerp
self.noTransitions()
self.loadFade()
self.fade.reparentTo(aspect2d, FADE_SORT_INDEX)
self.fade.setColor(self.alphaOn)
elif ConfigVariableBool('no-loading-screen', False):
if finishIval:
self.transitionIval = finishIval
self.transitionIval.start()
else:
# Create a sequence that lerps the color out, then
# parents the fade to hidden
self.transitionIval = self.getFadeOutIval(t,finishIval)
self.transitionIval.start()
def fadeOutActive(self):
return self.fade and self.fade.getColor()[3] > 0
def fadeScreen(self, alpha=0.5):
"""
Put a semitransparent screen over the camera plane
to darken out the world. Useful for drawing attention to
a dialog box for instance
"""
#print "transitiosn: fadeScreen"
self.noTransitions()
self.loadFade()
self.fade.reparentTo(aspect2d, FADE_SORT_INDEX)
self.fade.setColor(self.alphaOn[0],
self.alphaOn[1],
self.alphaOn[2],
alpha)
def fadeScreenColor(self, color):
"""
Put a semitransparent screen over the camera plane
to darken out the world. Useful for drawing attention to
a dialog box for instance
"""
#print "transitiosn: fadeScreenColor"
self.noTransitions()
self.loadFade()
self.fade.reparentTo(aspect2d, FADE_SORT_INDEX)
self.fade.setColor(color)
def noFade(self):
"""
Removes any current fade tasks and parents the fade polygon away
"""
#print "transitiosn: noFade"
if self.transitionIval:
self.transitionIval.pause()
self.transitionIval = None
if self.fade:
# Make sure to reset the color, since fadeOutActive() is looking at it
self.fade.setColor(self.alphaOff)
self.fade.detachNode()
def setFadeColor(self, r, g, b):
self.alphaOn.set(r, g, b, 1)
self.alphaOff.set(r, g, b, 0)
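    # Example (sketch): a typical fade round-trip from game code, assuming a
    # running ShowBase app, which exposes an instance of this class as
    # base.transitions:
    #   base.transitions.fadeOut(t=1.0)   # screen lerps to black and stays
    #   ...load or rearrange the scene...
    #   base.transitions.fadeIn(t=1.0)    # black lerps away, polygon detaches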
##################################################
# Iris
##################################################
def loadIris(self):
        if self.iris is None:
self.iris = loader.loadModel(self.IrisModelName)
self.iris.setPos(0, 0, 0)
def irisIn(self, t=0.5, finishIval=None):
"""
Play an iris in transition over t seconds.
Places a polygon on the aspect2d plane then lerps the scale
of the iris polygon up so it looks like we iris in. When the
scale lerp is finished, it parents the iris polygon to hidden.
"""
self.noTransitions()
self.loadIris()
if (t == 0):
self.iris.detachNode()
else:
self.iris.reparentTo(aspect2d, FADE_SORT_INDEX)
self.transitionIval = Sequence(LerpScaleInterval(self.iris, t,
scale = 0.18,
startScale = 0.01),
Func(self.iris.detachNode),
name = self.irisTaskName,
)
if finishIval:
self.transitionIval.append(finishIval)
self.transitionIval.start()
def irisOut(self, t=0.5, finishIval=None):
"""
Play an iris out transition over t seconds.
Places a polygon on the aspect2d plane then lerps the scale
of the iris down so it looks like we iris out. When the scale
lerp is finished, it leaves the iris polygon covering the
aspect2d plane until you irisIn or call noIris.
"""
self.noTransitions()
self.loadIris()
self.loadFade() # we need this to cover up the hole.
if (t == 0):
self.iris.detachNode()
self.fadeOut(0)
else:
self.iris.reparentTo(aspect2d, FADE_SORT_INDEX)
self.transitionIval = Sequence(LerpScaleInterval(self.iris, t,
scale = 0.01,
startScale = 0.18),
Func(self.iris.detachNode),
# Use the fade to cover up the hole that the iris would leave
Func(self.fadeOut, 0),
name = self.irisTaskName,
)
if finishIval:
self.transitionIval.append(finishIval)
self.transitionIval.start()
def noIris(self):
"""
Removes any current iris tasks and parents the iris polygon away
"""
if self.transitionIval:
self.transitionIval.pause()
self.transitionIval = None
        if self.iris is not None:
self.iris.detachNode()
# Actually we need to remove the fade too,
# because the iris effect uses it.
self.noFade()
def noTransitions(self):
"""
This call should immediately remove any and all transitions running
"""
self.noFade()
self.noIris()
# Letterbox is not really a transition, it is a screen overlay
# self.noLetterbox()
##################################################
# Letterbox
##################################################
def loadLetterbox(self):
if not self.letterbox:
            # The letterbox is a plain NodePath holding two DirectFrame bars.
            # Using DirectFrames, instead of simply loading polygon models and
            # using them directly, means the bars also obscure mouse events
            # for objects positioned behind them.
self.letterbox = NodePath("letterbox")
# Allow fade in and out of the bars
self.letterbox.setTransparency(1)
# Allow DirectLabels to be parented to the letterbox sensibly
self.letterbox.setBin('unsorted', 0)
# Allow a custom look to the letterbox graphic.
# TODO: This model isn't available everywhere. We should
# pass it in as a parameter.
button = loader.loadModel('models/gui/toplevel_gui',
okMissing = True)
barImage = None
if button:
barImage = button.find('**/generic_button')
self.letterboxTop = DirectFrame(
parent = self.letterbox,
guiId = 'letterboxTop',
relief = DGG.FLAT,
state = DGG.NORMAL,
frameColor = (0, 0, 0, 1),
borderWidth = (0, 0),
frameSize = (-1, 1, 0, 0.2),
pos = (0, 0, 0.8),
image = barImage,
image_scale = (2.25,1,.5),
image_pos = (0,0,.1),
image_color = (0.3,0.3,0.3,1),
sortOrder = 0,
)
self.letterboxBottom = DirectFrame(
parent = self.letterbox,
guiId = 'letterboxBottom',
relief = DGG.FLAT,
state = DGG.NORMAL,
frameColor = (0, 0, 0, 1),
borderWidth = (0, 0),
frameSize = (-1, 1, 0, 0.2),
pos = (0, 0, -1),
image = barImage,
image_scale = (2.25,1,.5),
image_pos = (0,0,.1),
image_color = (0.3,0.3,0.3,1),
sortOrder = 0,
)
# masad: always place these at the bottom of render
self.letterboxTop.setBin('sorted',0)
self.letterboxBottom.setBin('sorted',0)
self.letterbox.reparentTo(render2d, -1)
self.letterboxOff(0)
def noLetterbox(self):
"""
Removes any current letterbox tasks and parents the letterbox polygon away
"""
if self.letterboxIval:
self.letterboxIval.pause()
self.letterboxIval = None
if self.letterbox:
self.letterbox.stash()
def letterboxOn(self, t=0.25, finishIval=None):
"""
Move black bars in over t seconds.
"""
self.noLetterbox()
self.loadLetterbox()
self.letterbox.unstash()
if (t == 0):
self.letterboxBottom.setPos(0, 0, -1)
self.letterboxTop.setPos(0, 0, 0.8)
else:
self.letterboxIval = Sequence(Parallel(
LerpPosInterval(self.letterboxBottom,
t,
pos = Vec3(0, 0, -1),
#startPos = Vec3(0, 0, -1.2),
),
LerpPosInterval(self.letterboxTop,
t,
pos = Vec3(0, 0, 0.8),
# startPos = Vec3(0, 0, 1),
),
),
name = self.letterboxTaskName,
)
if finishIval:
self.letterboxIval.append(finishIval)
self.letterboxIval.start()
def letterboxOff(self, t=0.25, finishIval=None):
"""
Move black bars away over t seconds.
"""
self.noLetterbox()
self.loadLetterbox()
self.letterbox.unstash()
if (t == 0):
self.letterbox.stash()
else:
self.letterboxIval = Sequence(Parallel(
LerpPosInterval(self.letterboxBottom,
t,
pos = Vec3(0, 0, -1.2),
# startPos = Vec3(0, 0, -1),
),
LerpPosInterval(self.letterboxTop,
t,
pos = Vec3(0, 0, 1),
# startPos = Vec3(0, 0, 0.8),
),
),
Func(self.letterbox.stash),
Func(messenger.send,'letterboxOff'),
name = self.letterboxTaskName,
)
if finishIval:
self.letterboxIval.append(finishIval)
self.letterboxIval.start()
| mgracer48/panda3d | direct/src/showbase/Transitions.py | Python | bsd-3-clause | 17,023 |
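A minimal, self-contained usage sketch for the Transitions class above. It
assumes a working Panda3D install with the models the class loads by default;
ShowBase supplies the base/aspect2d/render2d globals the class depends on.
Timings are arbitrary.

from direct.showbase.ShowBase import ShowBase
from direct.interval.IntervalGlobal import Sequence, Wait
from direct.showbase.Transitions import Transitions

app = ShowBase()
transitions = Transitions(app.loader)

# Chain the un-started intervals the class exposes: fade to black, hold,
# then fade back in; start the letterbox bars independently.
Sequence(
    transitions.getFadeOutIval(t=1.0),
    Wait(0.5),
    transitions.getFadeInIval(t=1.0),
).start()
transitions.letterboxOn(t=0.25)

app.run()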
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from gpu_tests.gpu_test_expectations import GpuTestExpectations
# See the GpuTestExpectations class for documentation.
class ScreenshotSyncExpectations(GpuTestExpectations):
def __init__(self, *args, **kwargs):
super(ScreenshotSyncExpectations, self).__init__(*args, **kwargs)
def SetExpectations(self):
self.Flaky('ScreenshotSync.WithCanvas', ['win', 'amd'], bug=599776)
self.Flaky('ScreenshotSync.WithCanvas', ['mac', 'intel'], bug=599776)
self.Flaky('ScreenshotSync.WithDivs', ['mac', 'intel'], bug=599776)
| danakj/chromium | content/test/gpu/gpu_tests/screenshot_sync_expectations.py | Python | bsd-3-clause | 696 |
# -*- coding: utf-8 -*-
#
#
# Author: Yannick Vaucher
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{"name": "Logistics Consignee",
"summary": "Consignee on Sales, Purchases, Purchase requisition for Pickings",
"version": "0.1",
"author": "Camptocamp,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Logistics",
'complexity': "normal",
"images": [],
"website": "http://www.camptocamp.com",
"depends": ["base",
"sale_stock",
"purchase",
"purchase_requisition",
],
"demo": [],
"data": ['view/res_partner.xml',
'view/purchase_order.xml',
'view/purchase_requisition.xml',
'view/sale_order.xml',
'view/stock_picking.xml',
'view/report_saleorder.xml',
],
"test": ['test/test_report.yml'],
'installable': True,
"auto_install": False,
}
| mdietrichc2c/vertical-ngo | logistic_consignee/__openerp__.py | Python | agpl-3.0 | 1,559 |
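The manifest above only declares views, data files, and dependencies; the
models that carry the consignee live in the module's Python files, which are
not shown here. A hypothetical sketch in the OpenERP 7 "old API" style this
manifest implies (the field name consignee_id is an assumption, not taken
from the module):

from openerp.osv import fields, orm

class sale_order(orm.Model):
    _inherit = 'sale.order'
    _columns = {
        # Partner to whom the goods are ultimately consigned.
        'consignee_id': fields.many2one('res.partner', 'Consignee'),
    }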
__author__ = 'Dmitry Kolesov <[email protected]>'
| simgislab/address_utils | test_address/__init__.py | Python | gpl-2.0 | 53 |