body
stringlengths
26
98.2k
body_hash
int64
-9,222,864,604,528,158,000
9,221,803,474B
docstring
stringlengths
1
16.8k
path
stringlengths
5
230
name
stringlengths
1
96
repository_name
stringlengths
7
89
lang
stringclasses
1 value
body_without_docstring
stringlengths
20
98.2k
def to_dict(self):
    """Return the truthy fields of a jsonapi error as a dict."""
    fields = ('status', 'source', 'title', 'detail', 'id', 'code', 'links', 'meta')
    return {field: getattr(self, field) for field in fields if getattr(self, field, None)}
-1,829,391,211,692,273,700
Return values of each fields of an jsonapi error
flapison/exceptions.py
to_dict
Leechael/flapison
python
def to_dict(self):
    """Return the truthy fields of a jsonapi error as a dict."""
    fields = ('status', 'source', 'title', 'detail', 'id', 'code', 'links', 'meta')
    return {field: getattr(self, field) for field in fields if getattr(self, field, None)}
def validate_meta_info(container, template):
    """Validate BIDS meta information on *container*.

    Stores 'NA' under the template namespace when no BIDS info is present;
    otherwise records 'valid' and 'error_message' keys so the user can see
    whether the values passed template validation.
    """
    namespace = template.namespace
    # Guard clauses for the no-info / untouched cases.
    if 'info' not in container:
        container['info'] = {namespace: 'NA'}
        return
    if namespace not in container['info']:
        container['info'][namespace] = 'NA'
        return
    bids_info = container['info'][namespace]
    if bids_info == 'NA':
        return
    valid, error_message = True, ''
    tpl_name = bids_info.get('template')
    if tpl_name:
        tpl_def = template.definitions.get(tpl_name)
        if not tpl_def:
            valid = False
            error_message += ('Unknown template: %s. ' % tpl_name)
        else:
            errors = template.validate(tpl_def, bids_info)
            if errors:
                valid = False
                error_message = '\n'.join([format_validation_error(err) for err in errors])
    bids_info['valid'] = valid
    bids_info['error_message'] = error_message
-6,676,395,166,213,369,000
Validate meta information Adds 'BIDS.NA' if no BIDS info present Adds 'BIDS.valid' and 'BIDS.error_message' to communicate to user if values are valid Currently, validation is only checking if mandatory properties are non-empty strings Could add the following checks: Are the values alpha numeric?
flywheel_bids/curate_bids.py
validate_meta_info
AndysWorth/bids-client
python
def validate_meta_info(container, template):
    """Validate BIDS meta information on *container*.

    Stores 'NA' under the template namespace when no BIDS info is present;
    otherwise records 'valid' and 'error_message' keys so the user can see
    whether the values passed template validation.
    """
    namespace = template.namespace
    if 'info' not in container:
        container['info'] = {namespace: 'NA'}
        return
    if namespace not in container['info']:
        container['info'][namespace] = 'NA'
        return
    bids_info = container['info'][namespace]
    if bids_info == 'NA':
        return
    valid = True
    # BUG FIX: the original initialized error_message to the template name
    # ("error_message = templateName = ..."), so the message always started
    # with the template name. It must start empty.
    error_message = ''
    templateName = bids_info.get('template')
    if templateName:
        templateDef = template.definitions.get(templateName)
        if templateDef:
            errors = template.validate(templateDef, bids_info)
            if errors:
                valid = False
                error_message = '\n'.join([format_validation_error(err) for err in errors])
        else:
            valid = False
            error_message += ('Unknown template: %s. ' % templateName)
    bids_info['valid'] = valid
    bids_info['error_message'] = error_message
def update_meta_info(fw, context):
    """Push the curated info for the current container back to Flywheel."""
    ctype = context['container_type']
    if ctype == 'file':
        parent = context['parent_container_type']
        if parent == 'acquisition':
            fw.set_acquisition_file_info(context['acquisition']['id'],
                                         context['file']['name'],
                                         context['file']['info'])
        elif parent == 'project':
            fw.set_project_file_info(context['project']['id'],
                                     context['file']['name'],
                                     context['file']['info'])
        elif parent == 'session':
            fw.set_session_file_info(context['session']['id'],
                                     context['file']['name'],
                                     context['file']['info'])
        else:
            logger.info('Cannot determine file parent container type: ' + parent)
    elif ctype == 'project':
        fw.replace_project_info(context['project']['id'], context['project']['info'])
    elif ctype == 'session':
        fw.replace_session_info(context['session']['id'], context['session']['info'])
    elif ctype == 'acquisition':
        fw.replace_acquisition_info(context['acquisition']['id'],
                                    context['acquisition']['info'])
    else:
        logger.info('Cannot determine container type: ' + ctype)
3,638,618,362,590,002,700
Update file information
flywheel_bids/curate_bids.py
update_meta_info
AndysWorth/bids-client
python
def update_meta_info(fw, context):
    """Push the curated info for the current container back to Flywheel."""
    ctype = context['container_type']
    if ctype == 'file':
        parent = context['parent_container_type']
        if parent == 'acquisition':
            fw.set_acquisition_file_info(context['acquisition']['id'],
                                         context['file']['name'],
                                         context['file']['info'])
        elif parent == 'project':
            fw.set_project_file_info(context['project']['id'],
                                     context['file']['name'],
                                     context['file']['info'])
        elif parent == 'session':
            fw.set_session_file_info(context['session']['id'],
                                     context['file']['name'],
                                     context['file']['info'])
        else:
            logger.info('Cannot determine file parent container type: ' + parent)
    elif ctype == 'project':
        fw.replace_project_info(context['project']['id'], context['project']['info'])
    elif ctype == 'session':
        fw.replace_session_info(context['session']['id'], context['session']['info'])
    elif ctype == 'acquisition':
        fw.replace_acquisition_info(context['acquisition']['id'],
                                    context['acquisition']['info'])
    else:
        logger.info('Cannot determine container type: ' + ctype)
def curate_bids_dir(fw, project_id, session_id=None, reset=False, template_file=None, session_only=False):
    """Curate a project (or a single session) in BIDS format.

    fw: Flywheel client
    project_id: project id of the project to curate
    session_id: optional session id to curate
    reset: whether to reset bids info before curation
    template_file: the template file to use
    session_only: if True, curate only the provided session
    """
    project_tree = get_project_tree(fw, project_id, session_id=session_id,
                                    session_only=session_only)
    curate_bids_tree(fw, project_tree, reset, template_file, True)
2,355,357,426,333,451,300
fw: Flywheel client project_id: project id of project to curate session_id: The optional session id to curate reset: Whether or not to reset bids info before curation template_file: The template file to use session_only: If true, then only curate the provided session
flywheel_bids/curate_bids.py
curate_bids_dir
AndysWorth/bids-client
python
def curate_bids_dir(fw, project_id, session_id=None, reset=False, template_file=None, session_only=False):
    """Curate a project (or a single session) in BIDS format.

    fw: Flywheel client
    project_id: project id of the project to curate
    session_id: optional session id to curate
    reset: whether to reset bids info before curation
    template_file: the template file to use
    session_only: if True, curate only the provided session
    """
    project_tree = get_project_tree(fw, project_id, session_id=session_id,
                                    session_only=session_only)
    curate_bids_tree(fw, project_tree, reset, template_file, True)
def test_basic_state_machine_inline_definition(self): '\n Creates a State Machine from inline definition\n ' self.create_and_verify_stack('basic_state_machine_inline_definition')
3,202,951,229,708,989,400
Creates a State Machine from inline definition
integration/single/test_basic_state_machine.py
test_basic_state_machine_inline_definition
faraz891/serverless-application-model
python
def test_basic_state_machine_inline_definition(self): '\n \n ' self.create_and_verify_stack('basic_state_machine_inline_definition')
def test_basic_state_machine_with_tags(self): '\n Creates a State Machine with tags\n ' self.create_and_verify_stack('basic_state_machine_with_tags') tags = self.get_stack_tags('MyStateMachineArn') self.assertIsNotNone(tags) self._verify_tag_presence(tags, 'stateMachine:createdBy', 'SAM') self._verify_tag_presence(tags, 'TagOne', 'ValueOne') self._verify_tag_presence(tags, 'TagTwo', 'ValueTwo')
-2,131,908,560,489,288,000
Creates a State Machine with tags
integration/single/test_basic_state_machine.py
test_basic_state_machine_with_tags
faraz891/serverless-application-model
python
def test_basic_state_machine_with_tags(self): '\n \n ' self.create_and_verify_stack('basic_state_machine_with_tags') tags = self.get_stack_tags('MyStateMachineArn') self.assertIsNotNone(tags) self._verify_tag_presence(tags, 'stateMachine:createdBy', 'SAM') self._verify_tag_presence(tags, 'TagOne', 'ValueOne') self._verify_tag_presence(tags, 'TagTwo', 'ValueTwo')
def _verify_tag_presence(self, tags, key, value): '\n Verifies the presence of a tag and its value\n\n Parameters\n ----------\n tags : List of dict\n List of tag objects\n key : string\n Tag key\n value : string\n Tag value\n ' tag = next((tag for tag in tags if (tag['key'] == key))) self.assertIsNotNone(tag) self.assertEqual(tag['value'], value)
-7,843,542,449,982,567,000
Verifies the presence of a tag and its value Parameters ---------- tags : List of dict List of tag objects key : string Tag key value : string Tag value
integration/single/test_basic_state_machine.py
_verify_tag_presence
faraz891/serverless-application-model
python
def _verify_tag_presence(self, tags, key, value): '\n Verifies the presence of a tag and its value\n\n Parameters\n ----------\n tags : List of dict\n List of tag objects\n key : string\n Tag key\n value : string\n Tag value\n ' tag = next((tag for tag in tags if (tag['key'] == key))) self.assertIsNotNone(tag) self.assertEqual(tag['value'], value)
async def async_setup_entry(hass, entry, async_add_devices) -> None:
    """Set up the device tracker platform from a config entry."""
    session = hass.data[DOMAIN][entry.entry_id]
    async_add_devices(
        AutomowerTracker(session, idx)
        for (idx, _mower) in enumerate(session.data['data'])
    )
3,304,603,567,657,200,600
Setup sensor platform.
custom_components/husqvarna_automower/device_tracker.py
async_setup_entry
kalhimeo/husqvarna_automower
python
async def async_setup_entry(hass, entry, async_add_devices) -> None:
    """Set up the device tracker platform from a config entry."""
    session = hass.data[DOMAIN][entry.entry_id]
    async_add_devices(
        AutomowerTracker(session, idx)
        for (idx, _mower) in enumerate(session.data['data'])
    )
@property
def name(self) -> str:
    """Name of the entity (the mower's configured name)."""
    return self.mower_name
-2,878,577,418,636,612,600
Return the name of the entity.
custom_components/husqvarna_automower/device_tracker.py
name
kalhimeo/husqvarna_automower
python
@property
def name(self) -> str:
    """Name of the entity (the mower's configured name)."""
    return self.mower_name
@property
def unique_id(self) -> str:
    """Unique identifier for this entity (mower id + '_dt' suffix)."""
    return '%s_dt' % self.mower_id
7,398,093,011,582,418,000
Return a unique identifier for this entity.
custom_components/husqvarna_automower/device_tracker.py
unique_id
kalhimeo/husqvarna_automower
python
@property
def unique_id(self) -> str:
    """Unique identifier for this entity (mower id + '_dt' suffix)."""
    return '%s_dt' % self.mower_id
@property
def source_type(self) -> str:
    """Source type of the device tracker (GPS)."""
    return SOURCE_TYPE_GPS
-4,133,710,505,846,444,000
Return the source type, eg gps or router, of the device.
custom_components/husqvarna_automower/device_tracker.py
source_type
kalhimeo/husqvarna_automower
python
@property
def source_type(self) -> str:
    """Source type of the device tracker (GPS)."""
    return SOURCE_TYPE_GPS
@property
def latitude(self) -> float:
    """Latitude of the mower's most recent reported position."""
    return self.__get_mower_attributes()['positions'][0]['latitude']
-7,468,841,394,407,920,000
Return latitude value of the device.
custom_components/husqvarna_automower/device_tracker.py
latitude
kalhimeo/husqvarna_automower
python
@property
def latitude(self) -> float:
    """Latitude of the mower's most recent reported position."""
    return self.__get_mower_attributes()['positions'][0]['latitude']
@property
def longitude(self) -> float:
    """Longitude of the mower's most recent reported position."""
    return self.__get_mower_attributes()['positions'][0]['longitude']
-631,322,355,529,604,700
Return longitude value of the device.
custom_components/husqvarna_automower/device_tracker.py
longitude
kalhimeo/husqvarna_automower
python
@property
def longitude(self) -> float:
    """Longitude of the mower's most recent reported position."""
    return self.__get_mower_attributes()['positions'][0]['longitude']
def main():
    """Run AFL repeatedly, feeding externally generated packets from STDIN
    into Faucet's packet-in handler."""
    application = faucet.Faucet(
        dpset=dpset.DPSet(),
        faucet_experimental_api=faucet_experimental_api.FaucetExperimentalAPI())
    application.start()
    if application.valves_manager is not None:
        # Force each valve's datapath into the running state, preserving the
        # dyn_finalized flag around the mutation.
        for valve in list(application.valves_manager.valves.values()):
            saved = valve.dp.dyn_finalized
            valve.dp.dyn_finalized = False
            valve.dp.running = True
            valve.dp.dyn_finalized = saved
    while afl.loop(ROUNDS):
        raw = sys.stdin.read()
        try:
            payload = bytearray.fromhex(raw)
        except (ValueError, TypeError):
            # Skip inputs that are not valid hex.
            continue
        datapath = fake_packet.Datapath(1)
        message = fake_packet.Message(datapath=datapath, cookie=15243729,
                                      port=1, data=payload, in_port=1)
        event = fake_packet.RyuEvent(message)
        application.packet_in_handler(event)
-2,766,946,133,878,956,000
Run AFL repeatedly with externally supplied generated packet from STDIN.
tests/fuzzer/fuzz_packet.py
main
1ndochine/faucet
python
def main():
    """Run AFL repeatedly, feeding externally generated packets from STDIN
    into Faucet's packet-in handler."""
    application = faucet.Faucet(
        dpset=dpset.DPSet(),
        faucet_experimental_api=faucet_experimental_api.FaucetExperimentalAPI())
    application.start()
    if application.valves_manager is not None:
        for valve in list(application.valves_manager.valves.values()):
            saved = valve.dp.dyn_finalized
            valve.dp.dyn_finalized = False
            valve.dp.running = True
            valve.dp.dyn_finalized = saved
    while afl.loop(ROUNDS):
        raw = sys.stdin.read()
        try:
            payload = bytearray.fromhex(raw)
        except (ValueError, TypeError):
            # Skip inputs that are not valid hex.
            continue
        datapath = fake_packet.Datapath(1)
        message = fake_packet.Message(datapath=datapath, cookie=15243729,
                                      port=1, data=payload, in_port=1)
        event = fake_packet.RyuEvent(message)
        application.packet_in_handler(event)
def __add(self, name, x_axis, y_axis, **kwargs):
    """Add a candlestick (K-line) series.

    :param name: series name, shown in the tooltip and used for legend filtering.
    :param x_axis: x-axis data.
    :param y_axis: y-axis data; each row is one data item of the form
        [open, close, lowest, highest].
    :param kwargs: additional chart options.
    """
    kwargs.update(type='candlestick', x_axis=x_axis)
    kwargs.setdefault('tooltip_formatter', kline_tooltip_formatter)
    kwargs.setdefault('tooltip_trigger', 'axis')
    chart = self._get_all_options(**kwargs)
    (xaxis, yaxis) = chart['xy_axis']
    self._option.update(xAxis=xaxis, yAxis=yaxis)
    # Candlesticks need scaled axes; give the y axis a split area as well.
    self._option.get('xAxis')[0]['scale'] = True
    self._option.get('yAxis')[0]['scale'] = True
    self._option.get('yAxis')[0]['splitArea'] = {'show': True}
    self._option.get('legend')[0].get('data').append(name)
    self._option.get('series').append({
        'type': 'candlestick',
        'name': name,
        'data': y_axis,
        'markPoint': chart['mark_point'],
        'markLine': chart['mark_line'],
        'seriesId': self._option.get('series_id'),
    })
    self._config_components(**kwargs)
-7,801,484,974,135,266,000
:param name: 系列名称,用于 tooltip 的显示,legend 的图例筛选。 :param x_axis: x 坐标轴数据。 :param y_axis: y 坐标轴数据。数据中,每一行是一个『数据项』,每一列属于一个『维度』。 数据项具体为 [open, close, lowest, highest] (即:[开盘值, 收盘值, 最低值, 最高值])。 :param kwargs:
venv/lib/python3.7/site-packages/pyecharts/charts/kline.py
__add
Amoswish/graduaction_design_pubgprediction
python
def __add(self, name, x_axis, y_axis, **kwargs):
    """Add a candlestick (K-line) series.

    :param name: series name, shown in the tooltip and used for legend filtering.
    :param x_axis: x-axis data.
    :param y_axis: y-axis data; each row is one data item of the form
        [open, close, lowest, highest].
    :param kwargs: additional chart options.
    """
    kwargs.update(type='candlestick', x_axis=x_axis)
    kwargs.setdefault('tooltip_formatter', kline_tooltip_formatter)
    kwargs.setdefault('tooltip_trigger', 'axis')
    chart = self._get_all_options(**kwargs)
    (xaxis, yaxis) = chart['xy_axis']
    self._option.update(xAxis=xaxis, yAxis=yaxis)
    self._option.get('xAxis')[0]['scale'] = True
    self._option.get('yAxis')[0]['scale'] = True
    self._option.get('yAxis')[0]['splitArea'] = {'show': True}
    self._option.get('legend')[0].get('data').append(name)
    self._option.get('series').append({
        'type': 'candlestick',
        'name': name,
        'data': y_axis,
        'markPoint': chart['mark_point'],
        'markLine': chart['mark_line'],
        'seriesId': self._option.get('series_id'),
    })
    self._config_components(**kwargs)
@staticmethod
def CreateDefaultFabricSheetType(ADoc):
    """CreateDefaultFabricSheetType(ADoc: Document) -> ElementId

    Creates a new FabricSheetType object with a default name.

    ADoc: The document.
    Returns: The newly created type id.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
2,747,348,701,114,543,000
CreateDefaultFabricSheetType(ADoc: Document) -> ElementId Creates a new FabricSheetType object with a default name. ADoc: The document. Returns: The newly created type id.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
CreateDefaultFabricSheetType
BCSharp/ironpython-stubs
python
@staticmethod
def CreateDefaultFabricSheetType(ADoc):
    """CreateDefaultFabricSheetType(ADoc: Document) -> ElementId

    Creates a new FabricSheetType object with a default name.

    ADoc: The document.
    Returns: The newly created type id.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def Dispose(self):
    """Dispose(self: Element, A_0: bool)"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-1,686,048,740,131,138,300
Dispose(self: Element,A_0: bool)
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
Dispose
BCSharp/ironpython-stubs
python
def Dispose(self):
    """Dispose(self: Element, A_0: bool)"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def getBoundingBox(self, *args):
    """getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
5,691,465,885,205,531,000
getBoundingBox(self: Element,view: View) -> BoundingBoxXYZ
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
getBoundingBox
BCSharp/ironpython-stubs
python
def getBoundingBox(self, *args):
    """getBoundingBox(self: Element, view: View) -> BoundingBoxXYZ"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def GetReinforcementRoundingManager(self):
    """GetReinforcementRoundingManager(self: FabricSheetType) -> FabricRoundingManager

    Returns an object for managing reinforcement rounding override settings.

    Returns: The rounding manager.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-2,272,743,556,645,741,000
GetReinforcementRoundingManager(self: FabricSheetType) -> FabricRoundingManager Returns an object for managing reinforcement rounding override settings. Returns: The rounding manager.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
GetReinforcementRoundingManager
BCSharp/ironpython-stubs
python
def GetReinforcementRoundingManager(self):
    """GetReinforcementRoundingManager(self: FabricSheetType) -> FabricRoundingManager

    Returns an object for managing reinforcement rounding override settings.

    Returns: The rounding manager.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def GetWireItem(self, wireIndex, direction):
    """GetWireItem(self: FabricSheetType, wireIndex: int, direction: WireDistributionDirection) -> FabricWireItem

    Gets the Wire stored in the FabricSheetType at the associated index.

    wireIndex: Item index in the Fabric Sheet
    direction: Wire distribution direction of the inquired item
    Returns: Fabric wire Item
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
7,446,795,452,584,217,000
GetWireItem(self: FabricSheetType,wireIndex: int,direction: WireDistributionDirection) -> FabricWireItem Gets the Wire stored in the FabricSheetType at the associated index. wireIndex: Item index in the Fabric Sheet direction: Wire distribution direction of the inquired item Returns: Fabric wire Item
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
GetWireItem
BCSharp/ironpython-stubs
python
def GetWireItem(self, wireIndex, direction):
    """GetWireItem(self: FabricSheetType, wireIndex: int, direction: WireDistributionDirection) -> FabricWireItem

    Gets the Wire stored in the FabricSheetType at the associated index.

    wireIndex: Item index in the Fabric Sheet
    direction: Wire distribution direction of the inquired item
    Returns: Fabric wire Item
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def IsCustom(self):
    """IsCustom(self: FabricSheetType) -> bool

    Verifies if the type is Custom Fabric Sheet.

    Returns: True if Layout is set on Custom and if the wireArr is not null.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
3,391,039,994,397,098,500
IsCustom(self: FabricSheetType) -> bool Verifies if the type is Custom Fabric Sheet Returns: True if Layout is set on Custom and if the wireArr is not null
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
IsCustom
BCSharp/ironpython-stubs
python
def IsCustom(self):
    """IsCustom(self: FabricSheetType) -> bool

    Verifies if the type is Custom Fabric Sheet.

    Returns: True if Layout is set on Custom and if the wireArr is not null.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def IsValidMajorLapSplice(self, majorLapSplice):
    """IsValidMajorLapSplice(self: FabricSheetType, majorLapSplice: float) -> bool

    Identifies if the input value is valid to be applied as the major lap
    splice value for this FabricSheetType.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-5,556,407,697,711,791,000
IsValidMajorLapSplice(self: FabricSheetType,majorLapSplice: float) -> bool Identifies if the input value is valid to be applied as the major lap splice value for this FabricSheetType.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
IsValidMajorLapSplice
BCSharp/ironpython-stubs
python
def IsValidMajorLapSplice(self, majorLapSplice):
    """IsValidMajorLapSplice(self: FabricSheetType, majorLapSplice: float) -> bool

    Identifies if the input value is valid to be applied as the major lap
    splice value for this FabricSheetType.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def IsValidMinorLapSplice(self, minorLapSplice):
    """IsValidMinorLapSplice(self: FabricSheetType, minorLapSplice: float) -> bool

    Identifies if the input value is valid to be applied as the minor lap
    splice value for this FabricSheetType.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-728,808,364,812,891,900
IsValidMinorLapSplice(self: FabricSheetType,minorLapSplice: float) -> bool Identifies if the input value is valid to be applied as the minor lap splice value for this FabricSheetType.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
IsValidMinorLapSplice
BCSharp/ironpython-stubs
python
def IsValidMinorLapSplice(self, minorLapSplice):
    """IsValidMinorLapSplice(self: FabricSheetType, minorLapSplice: float) -> bool

    Identifies if the input value is valid to be applied as the minor lap
    splice value for this FabricSheetType.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def ReleaseUnmanagedResources(self, *args):
    """ReleaseUnmanagedResources(self: Element, disposing: bool)"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-5,457,876,814,946,568,000
ReleaseUnmanagedResources(self: Element,disposing: bool)
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
ReleaseUnmanagedResources
BCSharp/ironpython-stubs
python
def ReleaseUnmanagedResources(self, *args):
    """ReleaseUnmanagedResources(self: Element, disposing: bool)"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def setElementType(self, *args):
    """setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str)"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
2,544,228,957,635,987,500
setElementType(self: Element,type: ElementType,incompatibleExceptionMessage: str)
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
setElementType
BCSharp/ironpython-stubs
python
def setElementType(self, *args):
    """setElementType(self: Element, type: ElementType, incompatibleExceptionMessage: str)"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def SetLayoutAsCustomPattern(self, minorStartOverhang, minorEndOverhang, majorStartOverhang, majorEndOverhang, minorFabricWireItems, majorFabricWireItems):
    """SetLayoutAsCustomPattern(self: FabricSheetType, minorStartOverhang: float, minorEndOverhang: float, majorStartOverhang: float, majorEndOverhang: float, minorFabricWireItems: IList[FabricWireItem], majorFabricWireItems: IList[FabricWireItem])"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-7,774,722,311,941,563,000
SetLayoutAsCustomPattern(self: FabricSheetType,minorStartOverhang: float,minorEndOverhang: float,majorStartOverhang: float,majorEndOverhang: float,minorFabricWireItems: IList[FabricWireItem],majorFabricWireItems: IList[FabricWireItem])
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetLayoutAsCustomPattern
BCSharp/ironpython-stubs
python
def SetLayoutAsCustomPattern(self, minorStartOverhang, minorEndOverhang, majorStartOverhang, majorEndOverhang, minorFabricWireItems, majorFabricWireItems):
    """SetLayoutAsCustomPattern(self: FabricSheetType, minorStartOverhang: float, minorEndOverhang: float, majorStartOverhang: float, majorEndOverhang: float, minorFabricWireItems: IList[FabricWireItem], majorFabricWireItems: IList[FabricWireItem])"""
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def SetMajorLayoutAsActualSpacing(self, overallWidth, minorStartOverhang, spacing):
    """SetMajorLayoutAsActualSpacing(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, spacing: float)

    Sets the major layout pattern as ActualSpacing, while specifying the
    needed parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    spacing: The distance between the wires in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-5,270,166,848,306,150,000
SetMajorLayoutAsActualSpacing(self: FabricSheetType,overallWidth: float,minorStartOverhang: float,spacing: float) Sets the major layout pattern as ActualSpacing,while specifying the needed parameters for this pattern. overallWidth: The entire width of the wire sheet in the minor direction. minorStartOverhang: The distance from the edge of the sheet to the first wire in the minor direction. spacing: The distance between the wires in the major direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMajorLayoutAsActualSpacing
BCSharp/ironpython-stubs
python
def SetMajorLayoutAsActualSpacing(self, overallWidth, minorStartOverhang, spacing):
    """SetMajorLayoutAsActualSpacing(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, spacing: float)

    Sets the major layout pattern as ActualSpacing, while specifying the
    needed parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    spacing: The distance between the wires in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def SetMajorLayoutAsFixedNumber(self, overallWidth, minorStartOverhang, minorEndOverhang, numberOfWires):
    """SetMajorLayoutAsFixedNumber(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, minorEndOverhang: float, numberOfWires: int)

    Sets the major layout pattern as FixedNumber, while specifying the needed
    parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    minorEndOverhang: The distance from the last wire to the edge of the sheet
        in the minor direction.
    numberOfWires: The number of the wires to set in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-9,019,583,894,977,338,000
SetMajorLayoutAsFixedNumber(self: FabricSheetType,overallWidth: float,minorStartOverhang: float,minorEndOverhang: float,numberOfWires: int) Sets the major layout pattern as FixedNumber,while specifying the needed parameters for this pattern. overallWidth: The entire width of the wire sheet in the minor direction. minorStartOverhang: The distance from the edge of the sheet to the first wire in the minor direction. minorEndOverhang: The distance from the last wire to the edge of the sheet in the minor direction. numberOfWires: The number of the wires to set in the major direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMajorLayoutAsFixedNumber
BCSharp/ironpython-stubs
python
def SetMajorLayoutAsFixedNumber(self, overallWidth, minorStartOverhang, minorEndOverhang, numberOfWires):
    """SetMajorLayoutAsFixedNumber(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, minorEndOverhang: float, numberOfWires: int)

    Sets the major layout pattern as FixedNumber, while specifying the needed
    parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    minorEndOverhang: The distance from the last wire to the edge of the sheet
        in the minor direction.
    numberOfWires: The number of the wires to set in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def SetMajorLayoutAsMaximumSpacing(self, overallWidth, minorStartOverhang, minorEndOverhang, spacing):
    """SetMajorLayoutAsMaximumSpacing(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, minorEndOverhang: float, spacing: float)

    Sets the major layout pattern as MaximumSpacing, while specifying the
    needed parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    minorEndOverhang: The distance from the last wire to the edge of the sheet
        in the minor direction.
    spacing: The distance between the wires in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
7,893,129,699,790,465,000
SetMajorLayoutAsMaximumSpacing(self: FabricSheetType,overallWidth: float,minorStartOverhang: float,minorEndOverhang: float,spacing: float) Sets the major layout pattern as MaximumSpacing,while specifying the needed parameters for this pattern. overallWidth: The entire width of the wire sheet in the minor direction. minorStartOverhang: The distance from the edge of the sheet to the first wire in the minor direction. minorEndOverhang: The distance from the last wire to the edge of the sheet in the minor direction. spacing: The distance between the wires in the major direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMajorLayoutAsMaximumSpacing
BCSharp/ironpython-stubs
python
def SetMajorLayoutAsMaximumSpacing(self, overallWidth, minorStartOverhang, minorEndOverhang, spacing):
    """SetMajorLayoutAsMaximumSpacing(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, minorEndOverhang: float, spacing: float)

    Sets the major layout pattern as MaximumSpacing, while specifying the
    needed parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    minorEndOverhang: The distance from the last wire to the edge of the sheet
        in the minor direction.
    spacing: The distance between the wires in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
def SetMajorLayoutAsNumberWithSpacing(self, overallWidth, minorStartOverhang, numberOfWires, spacing):
    """SetMajorLayoutAsNumberWithSpacing(self: FabricSheetType, overallWidth: float, minorStartOverhang: float, numberOfWires: int, spacing: float)

    Sets the major layout pattern as NumberWithSpacing, while specifying the
    needed parameters for this pattern.

    overallWidth: The entire width of the wire sheet in the minor direction.
    minorStartOverhang: The distance from the edge of the sheet to the first
        wire in the minor direction.
    numberOfWires: The number of the wires to set in the major direction.
    spacing: The distance between the wires in the major direction.
    """
    # .NET API stub: the implementation lives in the Revit runtime.
    pass
-2,168,758,701,225,340,400
SetMajorLayoutAsNumberWithSpacing(self: FabricSheetType,overallWidth: float,minorStartOverhang: float,numberOfWires: int,spacing: float) Sets the major layout pattern as NumberWithSpacing,while specifying the needed parameters for this pattern. overallWidth: The entire width of the wire sheet in the minor direction. minorStartOverhang: The distance from the edge of the sheet to the first wire in the minor direction. numberOfWires: The number of the wires to set in the major direction. spacing: The distance between the wires in the major direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMajorLayoutAsNumberWithSpacing
BCSharp/ironpython-stubs
python
def SetMajorLayoutAsNumberWithSpacing(self, overallWidth, minorStartOverhang, numberOfWires, spacing): '\n SetMajorLayoutAsNumberWithSpacing(self: FabricSheetType,overallWidth: float,minorStartOverhang: float,numberOfWires: int,spacing: float)\n\n Sets the major layout pattern as NumberWithSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallWidth: The entire width of the wire sheet in the minor direction.\n\n minorStartOverhang: The distance from the edge of the sheet to the first wire in the minor \n\n direction.\n\n \n\n numberOfWires: The number of the wires to set in the major direction.\n\n spacing: The distance between the wires in the major direction.\n ' pass
def SetMinorLayoutAsActualSpacing(self, overallLength, majorStartOverhang, spacing): '\n SetMinorLayoutAsActualSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,spacing: float)\n\n Sets the minor layout pattern as ActualSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n spacing: The distance between the wires in the minor direction.\n ' pass
4,713,439,067,194,703,000
SetMinorLayoutAsActualSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,spacing: float) Sets the minor layout pattern as ActualSpacing,while specifying the needed parameters for this pattern. overallLength: The entire length of the wire sheet in the major direction. majorStartOverhang: The distance from the edge of the sheet to the first wire in the major direction. spacing: The distance between the wires in the minor direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMinorLayoutAsActualSpacing
BCSharp/ironpython-stubs
python
def SetMinorLayoutAsActualSpacing(self, overallLength, majorStartOverhang, spacing): '\n SetMinorLayoutAsActualSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,spacing: float)\n\n Sets the minor layout pattern as ActualSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n spacing: The distance between the wires in the minor direction.\n ' pass
def SetMinorLayoutAsFixedNumber(self, overallLength, majorStartOverhang, majorEndOverhang, numberOfWires): '\n SetMinorLayoutAsFixedNumber(self: FabricSheetType,overallLength: float,majorStartOverhang: float,majorEndOverhang: float,numberOfWires: int)\n\n Sets the major layout pattern as FixedNumber,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n majorEndOverhang: The distance from the last wire to the edge of the sheet in the major direction.\n\n numberOfWires: The number of the wires to set in the minor direction.\n ' pass
6,722,719,487,200,080,000
SetMinorLayoutAsFixedNumber(self: FabricSheetType,overallLength: float,majorStartOverhang: float,majorEndOverhang: float,numberOfWires: int) Sets the major layout pattern as FixedNumber,while specifying the needed parameters for this pattern. overallLength: The entire length of the wire sheet in the major direction. majorStartOverhang: The distance from the edge of the sheet to the first wire in the major direction. majorEndOverhang: The distance from the last wire to the edge of the sheet in the major direction. numberOfWires: The number of the wires to set in the minor direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMinorLayoutAsFixedNumber
BCSharp/ironpython-stubs
python
def SetMinorLayoutAsFixedNumber(self, overallLength, majorStartOverhang, majorEndOverhang, numberOfWires): '\n SetMinorLayoutAsFixedNumber(self: FabricSheetType,overallLength: float,majorStartOverhang: float,majorEndOverhang: float,numberOfWires: int)\n\n Sets the major layout pattern as FixedNumber,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n majorEndOverhang: The distance from the last wire to the edge of the sheet in the major direction.\n\n numberOfWires: The number of the wires to set in the minor direction.\n ' pass
def SetMinorLayoutAsMaximumSpacing(self, overallLength, majorStartOverhang, majorEndOverhang, spacing): '\n SetMinorLayoutAsMaximumSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,majorEndOverhang: float,spacing: float)\n\n Sets the major layout pattern as MaximumSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n majorEndOverhang: The distance from the last wire to the edge of the sheet in the major direction.\n\n spacing: The distance between the wires in the minor direction.\n ' pass
6,353,136,580,958,307,000
SetMinorLayoutAsMaximumSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,majorEndOverhang: float,spacing: float) Sets the major layout pattern as MaximumSpacing,while specifying the needed parameters for this pattern. overallLength: The entire length of the wire sheet in the major direction. majorStartOverhang: The distance from the edge of the sheet to the first wire in the major direction. majorEndOverhang: The distance from the last wire to the edge of the sheet in the major direction. spacing: The distance between the wires in the minor direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMinorLayoutAsMaximumSpacing
BCSharp/ironpython-stubs
python
def SetMinorLayoutAsMaximumSpacing(self, overallLength, majorStartOverhang, majorEndOverhang, spacing): '\n SetMinorLayoutAsMaximumSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,majorEndOverhang: float,spacing: float)\n\n Sets the major layout pattern as MaximumSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n majorEndOverhang: The distance from the last wire to the edge of the sheet in the major direction.\n\n spacing: The distance between the wires in the minor direction.\n ' pass
def SetMinorLayoutAsNumberWithSpacing(self, overallLength, majorStartOverhang, numberOfWires, spacing): '\n SetMinorLayoutAsNumberWithSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,numberOfWires: int,spacing: float)\n\n Sets the major layout pattern as NumberWithSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n numberOfWires: The number of wires in the minor direction.\n\n spacing: The distance between the wires in the minor direction.\n ' pass
8,472,021,804,934,113,000
SetMinorLayoutAsNumberWithSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,numberOfWires: int,spacing: float) Sets the major layout pattern as NumberWithSpacing,while specifying the needed parameters for this pattern. overallLength: The entire length of the wire sheet in the major direction. majorStartOverhang: The distance from the edge of the sheet to the first wire in the major direction. numberOfWires: The number of wires in the minor direction. spacing: The distance between the wires in the minor direction.
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
SetMinorLayoutAsNumberWithSpacing
BCSharp/ironpython-stubs
python
def SetMinorLayoutAsNumberWithSpacing(self, overallLength, majorStartOverhang, numberOfWires, spacing): '\n SetMinorLayoutAsNumberWithSpacing(self: FabricSheetType,overallLength: float,majorStartOverhang: float,numberOfWires: int,spacing: float)\n\n Sets the major layout pattern as NumberWithSpacing,while specifying the needed \n\n parameters for this pattern.\n\n \n\n \n\n overallLength: The entire length of the wire sheet in the major direction.\n\n majorStartOverhang: The distance from the edge of the sheet to the first wire in the major \n\n direction.\n\n \n\n numberOfWires: The number of wires in the minor direction.\n\n spacing: The distance between the wires in the minor direction.\n ' pass
def __enter__(self, *args): ' __enter__(self: IDisposable) -> object ' pass
-4,485,805,406,909,797,400
__enter__(self: IDisposable) -> object
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
__enter__
BCSharp/ironpython-stubs
python
def __enter__(self, *args): ' ' pass
def __exit__(self, *args): ' __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) ' pass
-8,148,954,987,636,554,000
__exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
__exit__
BCSharp/ironpython-stubs
python
def __exit__(self, *args): ' ' pass
def __init__(self, *args): ' x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature ' pass
-90,002,593,062,007,400
x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature
release/stubs.min/Autodesk/Revit/DB/Structure/__init___parts/FabricSheetType.py
__init__
BCSharp/ironpython-stubs
python
def __init__(self, *args): ' ' pass
def test_rss2_feed(self): '\n Test the structure and content of feeds generated by Rss201rev2Feed.\n ' response = self.client.get('/syndication/rss2/') doc = minidom.parseString(response.content) feed_elem = doc.getElementsByTagName('rss') self.assertEqual(len(feed_elem), 1) feed = feed_elem[0] self.assertEqual(feed.getAttribute('version'), '2.0') chan_elem = feed.getElementsByTagName('channel') self.assertEqual(len(chan_elem), 1) chan = chan_elem[0] d = Entry.objects.latest('published').published ltz = tzinfo.LocalTimezone(d) last_build_date = rfc2822_date(d.replace(tzinfo=ltz)) self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category']) self.assertChildNodeContent(chan, {'title': 'My blog', 'description': 'A more thorough description of my blog.', 'link': 'http://example.com/blog/', 'language': 'en', 'lastBuildDate': last_build_date, 'ttl': '600', 'copyright': 'Copyright (c) 2007, Sally Smith'}) self.assertCategories(chan, ['python', 'django']) self.assertChildNodeContent(chan, {'title': 'My blog', 'link': 'http://example.com/blog/'}) self.assertEqual(chan.getElementsByTagName('atom:link')[0].getAttribute('href'), 'http://example.com/syndication/rss2/') d = Entry.objects.get(pk=1).published ltz = tzinfo.LocalTimezone(d) pub_date = rfc2822_date(d.replace(tzinfo=ltz)) items = chan.getElementsByTagName('item') self.assertEqual(len(items), Entry.objects.count()) self.assertChildNodeContent(items[0], {'title': 'My first entry', 'description': 'Overridden description: My first entry', 'link': 'http://example.com/blog/1/', 'guid': 'http://example.com/blog/1/', 'pubDate': pub_date, 'author': '[email protected] (Sally Smith)'}) self.assertCategories(items[0], ['python', 'testing']) for item in items: self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'category', 'pubDate', 'author']) self.assertIsNone(item.getElementsByTagName('guid')[0].attributes.get('isPermaLink'))
557,685,396,015,426,800
Test the structure and content of feeds generated by Rss201rev2Feed.
tests/syndication/tests.py
test_rss2_feed
adambrenecki/django
python
def test_rss2_feed(self): '\n \n ' response = self.client.get('/syndication/rss2/') doc = minidom.parseString(response.content) feed_elem = doc.getElementsByTagName('rss') self.assertEqual(len(feed_elem), 1) feed = feed_elem[0] self.assertEqual(feed.getAttribute('version'), '2.0') chan_elem = feed.getElementsByTagName('channel') self.assertEqual(len(chan_elem), 1) chan = chan_elem[0] d = Entry.objects.latest('published').published ltz = tzinfo.LocalTimezone(d) last_build_date = rfc2822_date(d.replace(tzinfo=ltz)) self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category']) self.assertChildNodeContent(chan, {'title': 'My blog', 'description': 'A more thorough description of my blog.', 'link': 'http://example.com/blog/', 'language': 'en', 'lastBuildDate': last_build_date, 'ttl': '600', 'copyright': 'Copyright (c) 2007, Sally Smith'}) self.assertCategories(chan, ['python', 'django']) self.assertChildNodeContent(chan, {'title': 'My blog', 'link': 'http://example.com/blog/'}) self.assertEqual(chan.getElementsByTagName('atom:link')[0].getAttribute('href'), 'http://example.com/syndication/rss2/') d = Entry.objects.get(pk=1).published ltz = tzinfo.LocalTimezone(d) pub_date = rfc2822_date(d.replace(tzinfo=ltz)) items = chan.getElementsByTagName('item') self.assertEqual(len(items), Entry.objects.count()) self.assertChildNodeContent(items[0], {'title': 'My first entry', 'description': 'Overridden description: My first entry', 'link': 'http://example.com/blog/1/', 'guid': 'http://example.com/blog/1/', 'pubDate': pub_date, 'author': '[email protected] (Sally Smith)'}) self.assertCategories(items[0], ['python', 'testing']) for item in items: self.assertChildNodes(item, ['title', 'link', 'description', 'guid', 'category', 'pubDate', 'author']) self.assertIsNone(item.getElementsByTagName('guid')[0].attributes.get('isPermaLink'))
def test_rss2_feed_guid_permalink_false(self): "\n Test if the 'isPermaLink' attribute of <guid> element of an item\n in the RSS feed is 'false'.\n " response = self.client.get('/syndication/rss2/guid_ispermalink_false/') doc = minidom.parseString(response.content) chan = doc.getElementsByTagName('rss')[0].getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') for item in items: self.assertEqual(item.getElementsByTagName('guid')[0].attributes.get('isPermaLink').value, 'false')
-4,946,709,179,231,778,000
Test if the 'isPermaLink' attribute of <guid> element of an item in the RSS feed is 'false'.
tests/syndication/tests.py
test_rss2_feed_guid_permalink_false
adambrenecki/django
python
def test_rss2_feed_guid_permalink_false(self): "\n Test if the 'isPermaLink' attribute of <guid> element of an item\n in the RSS feed is 'false'.\n " response = self.client.get('/syndication/rss2/guid_ispermalink_false/') doc = minidom.parseString(response.content) chan = doc.getElementsByTagName('rss')[0].getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') for item in items: self.assertEqual(item.getElementsByTagName('guid')[0].attributes.get('isPermaLink').value, 'false')
def test_rss2_feed_guid_permalink_true(self): "\n Test if the 'isPermaLink' attribute of <guid> element of an item\n in the RSS feed is 'true'.\n " response = self.client.get('/syndication/rss2/guid_ispermalink_true/') doc = minidom.parseString(response.content) chan = doc.getElementsByTagName('rss')[0].getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') for item in items: self.assertEqual(item.getElementsByTagName('guid')[0].attributes.get('isPermaLink').value, 'true')
-6,909,708,951,527,178,000
Test if the 'isPermaLink' attribute of <guid> element of an item in the RSS feed is 'true'.
tests/syndication/tests.py
test_rss2_feed_guid_permalink_true
adambrenecki/django
python
def test_rss2_feed_guid_permalink_true(self): "\n Test if the 'isPermaLink' attribute of <guid> element of an item\n in the RSS feed is 'true'.\n " response = self.client.get('/syndication/rss2/guid_ispermalink_true/') doc = minidom.parseString(response.content) chan = doc.getElementsByTagName('rss')[0].getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') for item in items: self.assertEqual(item.getElementsByTagName('guid')[0].attributes.get('isPermaLink').value, 'true')
def test_rss091_feed(self): '\n Test the structure and content of feeds generated by RssUserland091Feed.\n ' response = self.client.get('/syndication/rss091/') doc = minidom.parseString(response.content) feed_elem = doc.getElementsByTagName('rss') self.assertEqual(len(feed_elem), 1) feed = feed_elem[0] self.assertEqual(feed.getAttribute('version'), '0.91') chan_elem = feed.getElementsByTagName('channel') self.assertEqual(len(chan_elem), 1) chan = chan_elem[0] self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category']) self.assertChildNodeContent(chan, {'title': 'My blog', 'link': 'http://example.com/blog/'}) self.assertCategories(chan, ['python', 'django']) self.assertEqual(chan.getElementsByTagName('atom:link')[0].getAttribute('href'), 'http://example.com/syndication/rss091/') items = chan.getElementsByTagName('item') self.assertEqual(len(items), Entry.objects.count()) self.assertChildNodeContent(items[0], {'title': 'My first entry', 'description': 'Overridden description: My first entry', 'link': 'http://example.com/blog/1/'}) for item in items: self.assertChildNodes(item, ['title', 'link', 'description']) self.assertCategories(item, [])
-545,652,658,098,288,800
Test the structure and content of feeds generated by RssUserland091Feed.
tests/syndication/tests.py
test_rss091_feed
adambrenecki/django
python
def test_rss091_feed(self): '\n \n ' response = self.client.get('/syndication/rss091/') doc = minidom.parseString(response.content) feed_elem = doc.getElementsByTagName('rss') self.assertEqual(len(feed_elem), 1) feed = feed_elem[0] self.assertEqual(feed.getAttribute('version'), '0.91') chan_elem = feed.getElementsByTagName('channel') self.assertEqual(len(chan_elem), 1) chan = chan_elem[0] self.assertChildNodes(chan, ['title', 'link', 'description', 'language', 'lastBuildDate', 'item', 'atom:link', 'ttl', 'copyright', 'category']) self.assertChildNodeContent(chan, {'title': 'My blog', 'link': 'http://example.com/blog/'}) self.assertCategories(chan, ['python', 'django']) self.assertEqual(chan.getElementsByTagName('atom:link')[0].getAttribute('href'), 'http://example.com/syndication/rss091/') items = chan.getElementsByTagName('item') self.assertEqual(len(items), Entry.objects.count()) self.assertChildNodeContent(items[0], {'title': 'My first entry', 'description': 'Overridden description: My first entry', 'link': 'http://example.com/blog/1/'}) for item in items: self.assertChildNodes(item, ['title', 'link', 'description']) self.assertCategories(item, [])
def test_atom_feed(self): '\n Test the structure and content of feeds generated by Atom1Feed.\n ' response = self.client.get('/syndication/atom/') feed = minidom.parseString(response.content).firstChild self.assertEqual(feed.nodeName, 'feed') self.assertEqual(feed.getAttribute('xmlns'), 'http://www.w3.org/2005/Atom') self.assertChildNodes(feed, ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'rights', 'category', 'author']) for link in feed.getElementsByTagName('link'): if (link.getAttribute('rel') == 'self'): self.assertEqual(link.getAttribute('href'), 'http://example.com/syndication/atom/') entries = feed.getElementsByTagName('entry') self.assertEqual(len(entries), Entry.objects.count()) for entry in entries: self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'category', 'updated', 'published', 'rights', 'author']) summary = entry.getElementsByTagName('summary')[0] self.assertEqual(summary.getAttribute('type'), 'html')
-874,858,660,524,445,000
Test the structure and content of feeds generated by Atom1Feed.
tests/syndication/tests.py
test_atom_feed
adambrenecki/django
python
def test_atom_feed(self): '\n \n ' response = self.client.get('/syndication/atom/') feed = minidom.parseString(response.content).firstChild self.assertEqual(feed.nodeName, 'feed') self.assertEqual(feed.getAttribute('xmlns'), 'http://www.w3.org/2005/Atom') self.assertChildNodes(feed, ['title', 'subtitle', 'link', 'id', 'updated', 'entry', 'rights', 'category', 'author']) for link in feed.getElementsByTagName('link'): if (link.getAttribute('rel') == 'self'): self.assertEqual(link.getAttribute('href'), 'http://example.com/syndication/atom/') entries = feed.getElementsByTagName('entry') self.assertEqual(len(entries), Entry.objects.count()) for entry in entries: self.assertChildNodes(entry, ['title', 'link', 'id', 'summary', 'category', 'updated', 'published', 'rights', 'author']) summary = entry.getElementsByTagName('summary')[0] self.assertEqual(summary.getAttribute('type'), 'html')
def test_atom_feed_published_and_updated_elements(self): '\n Test that the published and updated elements are not\n the same and now adhere to RFC 4287.\n ' response = self.client.get('/syndication/atom/') feed = minidom.parseString(response.content).firstChild entries = feed.getElementsByTagName('entry') published = entries[0].getElementsByTagName('published')[0].firstChild.wholeText updated = entries[0].getElementsByTagName('updated')[0].firstChild.wholeText self.assertNotEqual(published, updated)
1,590,432,598,452,745,000
Test that the published and updated elements are not the same and now adhere to RFC 4287.
tests/syndication/tests.py
test_atom_feed_published_and_updated_elements
adambrenecki/django
python
def test_atom_feed_published_and_updated_elements(self): '\n Test that the published and updated elements are not\n the same and now adhere to RFC 4287.\n ' response = self.client.get('/syndication/atom/') feed = minidom.parseString(response.content).firstChild entries = feed.getElementsByTagName('entry') published = entries[0].getElementsByTagName('published')[0].firstChild.wholeText updated = entries[0].getElementsByTagName('updated')[0].firstChild.wholeText self.assertNotEqual(published, updated)
def test_latest_post_date(self): '\n Test that both the published and updated dates are\n considered when determining the latest post date.\n ' response = self.client.get('/syndication/atom/') feed = minidom.parseString(response.content).firstChild updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText d = Entry.objects.latest('published').published ltz = tzinfo.LocalTimezone(d) latest_published = rfc3339_date(d.replace(tzinfo=ltz)) self.assertEqual(updated, latest_published) response = self.client.get('/syndication/latest/') feed = minidom.parseString(response.content).firstChild updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText d = Entry.objects.exclude(pk=5).latest('updated').updated ltz = tzinfo.LocalTimezone(d) latest_updated = rfc3339_date(d.replace(tzinfo=ltz)) self.assertEqual(updated, latest_updated)
1,292,039,728,651,972,900
Test that both the published and updated dates are considered when determining the latest post date.
tests/syndication/tests.py
test_latest_post_date
adambrenecki/django
python
def test_latest_post_date(self): '\n Test that both the published and updated dates are\n considered when determining the latest post date.\n ' response = self.client.get('/syndication/atom/') feed = minidom.parseString(response.content).firstChild updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText d = Entry.objects.latest('published').published ltz = tzinfo.LocalTimezone(d) latest_published = rfc3339_date(d.replace(tzinfo=ltz)) self.assertEqual(updated, latest_published) response = self.client.get('/syndication/latest/') feed = minidom.parseString(response.content).firstChild updated = feed.getElementsByTagName('updated')[0].firstChild.wholeText d = Entry.objects.exclude(pk=5).latest('updated').updated ltz = tzinfo.LocalTimezone(d) latest_updated = rfc3339_date(d.replace(tzinfo=ltz)) self.assertEqual(updated, latest_updated)
def test_title_escaping(self): '\n Tests that titles are escaped correctly in RSS feeds.\n ' response = self.client.get('/syndication/rss2/') doc = minidom.parseString(response.content) for item in doc.getElementsByTagName('item'): link = item.getElementsByTagName('link')[0] if (link.firstChild.wholeText == 'http://example.com/blog/4/'): title = item.getElementsByTagName('title')[0] self.assertEqual(title.firstChild.wholeText, 'A &amp; B &lt; C &gt; D')
2,124,938,666,503,127,300
Tests that titles are escaped correctly in RSS feeds.
tests/syndication/tests.py
test_title_escaping
adambrenecki/django
python
def test_title_escaping(self): '\n \n ' response = self.client.get('/syndication/rss2/') doc = minidom.parseString(response.content) for item in doc.getElementsByTagName('item'): link = item.getElementsByTagName('link')[0] if (link.firstChild.wholeText == 'http://example.com/blog/4/'): title = item.getElementsByTagName('title')[0] self.assertEqual(title.firstChild.wholeText, 'A &amp; B &lt; C &gt; D')
def test_naive_datetime_conversion(self): '\n Test that datetimes are correctly converted to the local time zone.\n ' response = self.client.get('/syndication/naive-dates/') doc = minidom.parseString(response.content) updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText d = Entry.objects.latest('published').published ltz = tzinfo.LocalTimezone(d) latest = rfc3339_date(d.replace(tzinfo=ltz)) self.assertEqual(updated, latest)
5,615,690,422,344,322,000
Test that datetimes are correctly converted to the local time zone.
tests/syndication/tests.py
test_naive_datetime_conversion
adambrenecki/django
python
def test_naive_datetime_conversion(self): '\n \n ' response = self.client.get('/syndication/naive-dates/') doc = minidom.parseString(response.content) updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText d = Entry.objects.latest('published').published ltz = tzinfo.LocalTimezone(d) latest = rfc3339_date(d.replace(tzinfo=ltz)) self.assertEqual(updated, latest)
def test_aware_datetime_conversion(self): "\n Test that datetimes with timezones don't get trodden on.\n " response = self.client.get('/syndication/aware-dates/') doc = minidom.parseString(response.content) published = doc.getElementsByTagName('published')[0].firstChild.wholeText self.assertEqual(published[(- 6):], '+00:42')
4,266,143,746,981,813,000
Test that datetimes with timezones don't get trodden on.
tests/syndication/tests.py
test_aware_datetime_conversion
adambrenecki/django
python
def test_aware_datetime_conversion(self): "\n \n " response = self.client.get('/syndication/aware-dates/') doc = minidom.parseString(response.content) published = doc.getElementsByTagName('published')[0].firstChild.wholeText self.assertEqual(published[(- 6):], '+00:42')
def test_feed_url(self): '\n Test that the feed_url can be overridden.\n ' response = self.client.get('/syndication/feedurl/') doc = minidom.parseString(response.content) for link in doc.getElementsByTagName('link'): if (link.getAttribute('rel') == 'self'): self.assertEqual(link.getAttribute('href'), 'http://example.com/customfeedurl/')
-4,254,890,965,348,122,600
Test that the feed_url can be overridden.
tests/syndication/tests.py
test_feed_url
adambrenecki/django
python
def test_feed_url(self): '\n \n ' response = self.client.get('/syndication/feedurl/') doc = minidom.parseString(response.content) for link in doc.getElementsByTagName('link'): if (link.getAttribute('rel') == 'self'): self.assertEqual(link.getAttribute('href'), 'http://example.com/customfeedurl/')
def test_secure_urls(self): '\n Test URLs are prefixed with https:// when feed is requested over HTTPS.\n ' response = self.client.get('/syndication/rss2/', **{'wsgi.url_scheme': 'https'}) doc = minidom.parseString(response.content) chan = doc.getElementsByTagName('channel')[0] self.assertEqual(chan.getElementsByTagName('link')[0].firstChild.wholeText[0:5], 'https') atom_link = chan.getElementsByTagName('atom:link')[0] self.assertEqual(atom_link.getAttribute('href')[0:5], 'https') for link in doc.getElementsByTagName('link'): if (link.getAttribute('rel') == 'self'): self.assertEqual(link.getAttribute('href')[0:5], 'https')
4,286,342,682,963,835,400
Test URLs are prefixed with https:// when feed is requested over HTTPS.
tests/syndication/tests.py
test_secure_urls
adambrenecki/django
python
def test_secure_urls(self): '\n \n ' response = self.client.get('/syndication/rss2/', **{'wsgi.url_scheme': 'https'}) doc = minidom.parseString(response.content) chan = doc.getElementsByTagName('channel')[0] self.assertEqual(chan.getElementsByTagName('link')[0].firstChild.wholeText[0:5], 'https') atom_link = chan.getElementsByTagName('atom:link')[0] self.assertEqual(atom_link.getAttribute('href')[0:5], 'https') for link in doc.getElementsByTagName('link'): if (link.getAttribute('rel') == 'self'): self.assertEqual(link.getAttribute('href')[0:5], 'https')
def test_item_link_error(self): '\n Test that a ImproperlyConfigured is raised if no link could be found\n for the item(s).\n ' self.assertRaises(ImproperlyConfigured, self.client.get, '/syndication/articles/')
4,261,096,110,716,304,400
Test that a ImproperlyConfigured is raised if no link could be found for the item(s).
tests/syndication/tests.py
test_item_link_error
adambrenecki/django
python
def test_item_link_error(self): '\n Test that a ImproperlyConfigured is raised if no link could be found\n for the item(s).\n ' self.assertRaises(ImproperlyConfigured, self.client.get, '/syndication/articles/')
def test_template_feed(self): '\n Test that the item title and description can be overridden with\n templates.\n ' response = self.client.get('/syndication/template/') doc = minidom.parseString(response.content) feed = doc.getElementsByTagName('rss')[0] chan = feed.getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') self.assertChildNodeContent(items[0], {'title': 'Title in your templates: My first entry', 'description': 'Description in your templates: My first entry', 'link': 'http://example.com/blog/1/'})
-8,862,071,585,553,255,000
Test that the item title and description can be overridden with templates.
tests/syndication/tests.py
test_template_feed
adambrenecki/django
python
def test_template_feed(self): '\n Test that the item title and description can be overridden with\n templates.\n ' response = self.client.get('/syndication/template/') doc = minidom.parseString(response.content) feed = doc.getElementsByTagName('rss')[0] chan = feed.getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') self.assertChildNodeContent(items[0], {'title': 'Title in your templates: My first entry', 'description': 'Description in your templates: My first entry', 'link': 'http://example.com/blog/1/'})
def test_template_context_feed(self): '\n Test that custom context data can be passed to templates for title\n and description.\n ' response = self.client.get('/syndication/template_context/') doc = minidom.parseString(response.content) feed = doc.getElementsByTagName('rss')[0] chan = feed.getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') self.assertChildNodeContent(items[0], {'title': 'My first entry (foo is bar)', 'description': 'My first entry (foo is bar)'})
-624,403,934,501,047,800
Test that custom context data can be passed to templates for title and description.
tests/syndication/tests.py
test_template_context_feed
adambrenecki/django
python
def test_template_context_feed(self): '\n Test that custom context data can be passed to templates for title\n and description.\n ' response = self.client.get('/syndication/template_context/') doc = minidom.parseString(response.content) feed = doc.getElementsByTagName('rss')[0] chan = feed.getElementsByTagName('channel')[0] items = chan.getElementsByTagName('item') self.assertChildNodeContent(items[0], {'title': 'My first entry (foo is bar)', 'description': 'My first entry (foo is bar)'})
def test_add_domain(self): '\n Test add_domain() prefixes domains onto the correct URLs.\n ' self.assertEqual(views.add_domain('example.com', '/foo/?arg=value'), 'http://example.com/foo/?arg=value') self.assertEqual(views.add_domain('example.com', '/foo/?arg=value', True), 'https://example.com/foo/?arg=value') self.assertEqual(views.add_domain('example.com', 'http://djangoproject.com/doc/'), 'http://djangoproject.com/doc/') self.assertEqual(views.add_domain('example.com', 'https://djangoproject.com/doc/'), 'https://djangoproject.com/doc/') self.assertEqual(views.add_domain('example.com', 'mailto:[email protected]'), 'mailto:[email protected]') self.assertEqual(views.add_domain('example.com', '//example.com/foo/?arg=value'), 'http://example.com/foo/?arg=value')
-5,421,137,914,039,013,000
Test add_domain() prefixes domains onto the correct URLs.
tests/syndication/tests.py
test_add_domain
adambrenecki/django
python
def test_add_domain(self): '\n \n ' self.assertEqual(views.add_domain('example.com', '/foo/?arg=value'), 'http://example.com/foo/?arg=value') self.assertEqual(views.add_domain('example.com', '/foo/?arg=value', True), 'https://example.com/foo/?arg=value') self.assertEqual(views.add_domain('example.com', 'http://djangoproject.com/doc/'), 'http://djangoproject.com/doc/') self.assertEqual(views.add_domain('example.com', 'https://djangoproject.com/doc/'), 'https://djangoproject.com/doc/') self.assertEqual(views.add_domain('example.com', 'mailto:[email protected]'), 'mailto:[email protected]') self.assertEqual(views.add_domain('example.com', '//example.com/foo/?arg=value'), 'http://example.com/foo/?arg=value')
def PsdFun2Noise_1d(N, dx, PsdFun, PsdArgs): '\n\t\tGenerates a noise pattern based an the Power spectral density returned\n\t\tby PsdFun\n\t' x = np.arange(0, ((N // 2) + 1), dx) yHalf = PsdFun(x, *PsdArgs) y = Psd2NoisePattern_1d(yHalf, Semiaxis=True) return (x, y)
-7,011,816,010,989,427,000
Generates a noise pattern based an the Power spectral density returned by PsdFun
wiselib2/Noise.py
PsdFun2Noise_1d
WISE-Project/wiselib2
python
def PsdFun2Noise_1d(N, dx, PsdFun, PsdArgs): '\n\t\tGenerates a noise pattern based an the Power spectral density returned\n\t\tby PsdFun\n\t' x = np.arange(0, ((N // 2) + 1), dx) yHalf = PsdFun(x, *PsdArgs) y = Psd2NoisePattern_1d(yHalf, Semiaxis=True) return (x, y)
def PsdArray2Noise_1d_v2(f_in, Psd_in, L_mm, N): '\n\t\tReturns meters\n\t' from scipy import interpolate log = np.log fft = np.fft.fft fftshift = np.fft.fftshift ff = f_in yy = Psd_in L = L_mm N = int(N) N2 = int((N // 2)) L = 300 L_um = (L * 1000.0) L_nm = (L * 1000000.0) fMin = (1 / L_um) fSpline = (((np.arange(N2) / N2) * (max(ff) - min(ff))) + min(ff)) fun = interpolate.splrep(log(ff), log(yy), s=2) yPsd_log = interpolate.splev(log(fSpline), fun) ySpline = np.exp(yPsd_log) yPsd = ySpline yPsd[(fSpline < ff[0])] = 200 n = len(yPsd) plt.plot(fSpline, yPsd, '-') plt.plot(ff, yy, 'x') plt.legend(['ySpline', 'Data']) ax = plt.axes() import scipy.integrate as integrate RMS = np.sqrt(integrate.trapz(yPsd, (fSpline / 1000))) yPsd_reverse = yPsd[::(- 1)] ell = (1 / (fSpline[1] - fSpline[0])) if ((N % 2) == 0): yPsd2 = np.hstack((yPsd_reverse, 0, yPsd[0:(- 1)])) else: yPsd2 = np.hstack((yPsd_reverse, 0, yPsd)) yPsd2Norm = np.sqrt(((yPsd2 / ell) / 1000)) n_ = len(yPsd2) print(('len(yPsd2) = %0.2d' % len(yPsd2Norm))) phi = ((2 * np.pi) * np.random.rand(n_)) r = np.exp((1j * phi)) yPsd2Norm_ = fftshift(yPsd2Norm) yRaf = np.fft.fft((r * yPsd2Norm_)) yRaf = np.real(yRaf) print(('Rms = %0.2e nm' % np.std(yRaf))) plt.plot(yPsd2Norm_) print(('max yPsd_ = %d nm' % max(yPsd2))) print(('max yPsd2Norm = %0.4f nm' % max(yPsd2Norm))) print(('Rms yRaf2 = %0.2e nm' % np.std(yRaf))) return (yRaf * 1e-09)
-1,840,286,027,527,266,000
Returns meters
wiselib2/Noise.py
PsdArray2Noise_1d_v2
WISE-Project/wiselib2
python
def PsdArray2Noise_1d_v2(f_in, Psd_in, L_mm, N): '\n\t\t\n\t' from scipy import interpolate log = np.log fft = np.fft.fft fftshift = np.fft.fftshift ff = f_in yy = Psd_in L = L_mm N = int(N) N2 = int((N // 2)) L = 300 L_um = (L * 1000.0) L_nm = (L * 1000000.0) fMin = (1 / L_um) fSpline = (((np.arange(N2) / N2) * (max(ff) - min(ff))) + min(ff)) fun = interpolate.splrep(log(ff), log(yy), s=2) yPsd_log = interpolate.splev(log(fSpline), fun) ySpline = np.exp(yPsd_log) yPsd = ySpline yPsd[(fSpline < ff[0])] = 200 n = len(yPsd) plt.plot(fSpline, yPsd, '-') plt.plot(ff, yy, 'x') plt.legend(['ySpline', 'Data']) ax = plt.axes() import scipy.integrate as integrate RMS = np.sqrt(integrate.trapz(yPsd, (fSpline / 1000))) yPsd_reverse = yPsd[::(- 1)] ell = (1 / (fSpline[1] - fSpline[0])) if ((N % 2) == 0): yPsd2 = np.hstack((yPsd_reverse, 0, yPsd[0:(- 1)])) else: yPsd2 = np.hstack((yPsd_reverse, 0, yPsd)) yPsd2Norm = np.sqrt(((yPsd2 / ell) / 1000)) n_ = len(yPsd2) print(('len(yPsd2) = %0.2d' % len(yPsd2Norm))) phi = ((2 * np.pi) * np.random.rand(n_)) r = np.exp((1j * phi)) yPsd2Norm_ = fftshift(yPsd2Norm) yRaf = np.fft.fft((r * yPsd2Norm_)) yRaf = np.real(yRaf) print(('Rms = %0.2e nm' % np.std(yRaf))) plt.plot(yPsd2Norm_) print(('max yPsd_ = %d nm' % max(yPsd2))) print(('max yPsd2Norm = %0.4f nm' % max(yPsd2Norm))) print(('Rms yRaf2 = %0.2e nm' % np.std(yRaf))) return (yRaf * 1e-09)
def PsdArray2Noise_1d(PsdArray, N, Semiaxis=True, Real=True): '\n\tGenerates a noise pattern whose Power Spectral density is given by Psd.\n\n\tParameters\n\t---------------------\n\tPsd : 1d array\n\t\tContains the numeric Psd (treated as evenly spaced array)\n\n\tSemiaxis :\n\t\t0 : does nothing\n\t\t1 : halvens Pds, then replicates the halven part for left frequencies,\n\t\t\tproducing an output as long as Psd\n\t\t2 : replicates all Pds for lef frequencies as well, producing an output\n\t\t\ttwice as long as Psd\n\tReal : boolean\n\t\tIf True, the real part of the output is returned (default)\n\n\tReturns:\n\t---------------------\n\t\tAn array of the same length of Psd\n\t' if (Semiaxis == True): yHalf = PsdArray PsdArrayNew = np.hstack((yHalf[(- 1):0:(- 1)], yHalf)) idelta = (len(PsdArrayNew) - N) if (idelta == 1): PsdArrayNew = PsdArrayNew[0:(- 1)] elif (idelta == 0): pass else: print(('Error! len(PsdArrayNew) - len(PsdArray) = %0d' % idelta)) y = np.fft.fftshift(PsdArrayNew) r = ((2 * np.pi) * np.random.rand(len(PsdArrayNew))) f = np.fft.ifft((y * np.exp((1j * r)))) if Real: return np.real(f) else: return f
5,233,483,730,959,706,000
Generates a noise pattern whose Power Spectral density is given by Psd. Parameters --------------------- Psd : 1d array Contains the numeric Psd (treated as evenly spaced array) Semiaxis : 0 : does nothing 1 : halvens Pds, then replicates the halven part for left frequencies, producing an output as long as Psd 2 : replicates all Pds for lef frequencies as well, producing an output twice as long as Psd Real : boolean If True, the real part of the output is returned (default) Returns: --------------------- An array of the same length of Psd
wiselib2/Noise.py
PsdArray2Noise_1d
WISE-Project/wiselib2
python
def PsdArray2Noise_1d(PsdArray, N, Semiaxis=True, Real=True): '\n\tGenerates a noise pattern whose Power Spectral density is given by Psd.\n\n\tParameters\n\t---------------------\n\tPsd : 1d array\n\t\tContains the numeric Psd (treated as evenly spaced array)\n\n\tSemiaxis :\n\t\t0 : does nothing\n\t\t1 : halvens Pds, then replicates the halven part for left frequencies,\n\t\t\tproducing an output as long as Psd\n\t\t2 : replicates all Pds for lef frequencies as well, producing an output\n\t\t\ttwice as long as Psd\n\tReal : boolean\n\t\tIf True, the real part of the output is returned (default)\n\n\tReturns:\n\t---------------------\n\t\tAn array of the same length of Psd\n\t' if (Semiaxis == True): yHalf = PsdArray PsdArrayNew = np.hstack((yHalf[(- 1):0:(- 1)], yHalf)) idelta = (len(PsdArrayNew) - N) if (idelta == 1): PsdArrayNew = PsdArrayNew[0:(- 1)] elif (idelta == 0): pass else: print(('Error! len(PsdArrayNew) - len(PsdArray) = %0d' % idelta)) y = np.fft.fftshift(PsdArrayNew) r = ((2 * np.pi) * np.random.rand(len(PsdArrayNew))) f = np.fft.ifft((y * np.exp((1j * r)))) if Real: return np.real(f) else: return f
def GaussianNoise_1d(N, dx, Sigma): '\n\tPSD(f) = np.exp(-0.5^f/Sigma^2)\n\t' x = np.linspace((((- N) // 2) * dx), ((N // 2) - (1 * dx)), N) y = np.exp((((- 0.5) * (x ** 2)) / (Sigma ** 2))) return Psd2NoisePattern_1d(y)
112,063,719,355,874,370
PSD(f) = np.exp(-0.5^f/Sigma^2)
wiselib2/Noise.py
GaussianNoise_1d
WISE-Project/wiselib2
python
def GaussianNoise_1d(N, dx, Sigma): '\n\t\n\t' x = np.linspace((((- N) // 2) * dx), ((N // 2) - (1 * dx)), N) y = np.exp((((- 0.5) * (x ** 2)) / (Sigma ** 2))) return Psd2NoisePattern_1d(y)
def PowerLawNoise_1d(N, dx, a, b): '\n\tPSD(x) = a*x^b\n\t' x = np.arange(0, ((N // 2) + 1), dx) yHalf = (a * (x ** b)) return Psd2NoisePattern_1d(y, Semiaxis=True)
-6,643,562,930,226,485,000
PSD(x) = a*x^b
wiselib2/Noise.py
PowerLawNoise_1d
WISE-Project/wiselib2
python
def PowerLawNoise_1d(N, dx, a, b): '\n\t\n\t' x = np.arange(0, ((N // 2) + 1), dx) yHalf = (a * (x ** b)) return Psd2NoisePattern_1d(y, Semiaxis=True)
def FitPowerLaw(x, y): '\n\tFits the input data in the form\n\t\ty = a*x^b\n\treturns a,b\n\t' import scipy.optimize as optimize fFit = (lambda p, x: (p[0] * (x ** p[1]))) fErr = (lambda p, x, y: (y - fFit(p, x))) p0 = [max(y), (- 1.0)] out = optimize.leastsq(fErr, p0, args=(x, y), full_output=1) pOut = out[0] b = pOut[1] a = pOut[0] return (a, b)
3,532,495,610,609,890,000
Fits the input data in the form y = a*x^b returns a,b
wiselib2/Noise.py
FitPowerLaw
WISE-Project/wiselib2
python
def FitPowerLaw(x, y): '\n\tFits the input data in the form\n\t\ty = a*x^b\n\treturns a,b\n\t' import scipy.optimize as optimize fFit = (lambda p, x: (p[0] * (x ** p[1]))) fErr = (lambda p, x, y: (y - fFit(p, x))) p0 = [max(y), (- 1.0)] out = optimize.leastsq(fErr, p0, args=(x, y), full_output=1) pOut = out[0] b = pOut[1] a = pOut[0] return (a, b)
@PsdType.setter def PsdType(self, Val): '\n\t\tNote: each time that the Property value is set, self.CutoffLowHigh is\n\t\treset, is specified by options\n\t\t' self._PsdType = Val if (self.Options.AUTO_RESET_CUTOFF_ON_PSDTYPE_CHANGE == True): self.PsdCutoffLowHigh = [None, None]
7,852,885,694,877,605,000
Note: each time that the Property value is set, self.CutoffLowHigh is reset, is specified by options
wiselib2/Noise.py
PsdType
WISE-Project/wiselib2
python
@PsdType.setter def PsdType(self, Val): '\n\t\tNote: each time that the Property value is set, self.CutoffLowHigh is\n\t\treset, is specified by options\n\t\t' self._PsdType = Val if (self.Options.AUTO_RESET_CUTOFF_ON_PSDTYPE_CHANGE == True): self.PsdCutoffLowHigh = [None, None]
def PsdEval(self, N, df, CutoffLowHigh=[None, None]): "\n\t\tEvals the PSD in the range [0 - N*df]\n\t\tIt's good custom to have PSD[0] = 0, so that the noise pattern is\n\t\tzero-mean.\n\n\t\tParameters:\n\t\t----------------------\n\t\t\tN : int\n\t\t\t\t#of samples\n\t\t\tdf : float\n\t\t\t\tspacing of spatial frequencies (df=1/TotalLength)\n\t\t\tCutoffLowHigh : [LowCutoff, HighCutoff]\n\t\t\t\tif >0, then Psd(f<Cutoff) is set to 0.\n\t\t\t\t\t\tif None, then LowCutoff = min()\n\t\tReturns : fAll, yPsdAll\n\t\t----------------------\n\t\t\tfAll : 1d array\n\t\t\t\tcontains the spatial frequencies\n\t\t\tyPsd : 1d array\n\t\t\t\tcontains the Psd\n\t\t" '\n\t\tThe Pdf is evaluated only within LowCutoff and HoghCutoff\n\t\tIf the Pdf is PsdFuns.Interp, then LowCutoff and HighCutoff are\n\t\tautomatically set to min and max values of the experimental data\n\t\t' StrMessage = '' def GetInRange(fAll, LowCutoff, HighCutoff): _tmpa = (fAll >= LowCutoff) _tmpb = (fAll <= HighCutoff) fMid_Pos = np.all([_tmpa, _tmpb], 0) fMid = fAll[fMid_Pos] return (fMid_Pos, fMid) (LowCutoff, HighCutoff) = CutoffLowHigh fMin = 0 fMax = ((N - 1) * df) fAll = np.linspace(0, fMax, N) yPsdAll = (fAll * 0) LowCutoff = (0 if (LowCutoff is None) else LowCutoff) HighCutoff = ((N * df) if (HighCutoff is None) else HighCutoff) if (self.PsdType == PsdFuns.Interp): if (self.Options.FIT_NUMERIC_DATA_WITH_POWER_LAW == True): (xFreq, y) = self.NumericPsdGetXY() p = FitPowerLaw((1 / xFreq), y) _PsdParams = (p[0], (- p[1])) LowCutoff = np.amin(self._PsdNumericX) HighCutoff = np.amin(self._PsdNumericX) (fMid_Pos, fMid) = GetInRange(fAll, LowCutoff, HighCutoff) yPsd = PsdFuns.PowerLaw(fMid, *_PsdParams) else: LowVal = np.amin(self._PsdNumericX) HighVal = np.amax(self._PsdNumericX) LowCutoff = (LowVal if (LowCutoff <= LowVal) else LowCutoff) HighCutoff = (HighVal if (HighCutoff >= HighVal) else HighCutoff) (fMid_Pos, fMid) = GetInRange(fAll, LowCutoff, HighCutoff) yPsd = PsdFuns.Interp(fMid, 
self._PsdNumericX, self._PsdNumericY) else: (fMid_Pos, fMid) = GetInRange(fAll, LowCutoff, HighCutoff) yPsd = self.PsdType(fMid, *self.PsdParams) yPsdAll[fMid_Pos] = yPsd return (fAll, yPsdAll)
2,593,070,767,553,816,000
Evals the PSD in the range [0 - N*df] It's good custom to have PSD[0] = 0, so that the noise pattern is zero-mean. Parameters: ---------------------- N : int #of samples df : float spacing of spatial frequencies (df=1/TotalLength) CutoffLowHigh : [LowCutoff, HighCutoff] if >0, then Psd(f<Cutoff) is set to 0. if None, then LowCutoff = min() Returns : fAll, yPsdAll ---------------------- fAll : 1d array contains the spatial frequencies yPsd : 1d array contains the Psd
wiselib2/Noise.py
PsdEval
WISE-Project/wiselib2
python
def PsdEval(self, N, df, CutoffLowHigh=[None, None]): "\n\t\tEvals the PSD in the range [0 - N*df]\n\t\tIt's good custom to have PSD[0] = 0, so that the noise pattern is\n\t\tzero-mean.\n\n\t\tParameters:\n\t\t----------------------\n\t\t\tN : int\n\t\t\t\t#of samples\n\t\t\tdf : float\n\t\t\t\tspacing of spatial frequencies (df=1/TotalLength)\n\t\t\tCutoffLowHigh : [LowCutoff, HighCutoff]\n\t\t\t\tif >0, then Psd(f<Cutoff) is set to 0.\n\t\t\t\t\t\tif None, then LowCutoff = min()\n\t\tReturns : fAll, yPsdAll\n\t\t----------------------\n\t\t\tfAll : 1d array\n\t\t\t\tcontains the spatial frequencies\n\t\t\tyPsd : 1d array\n\t\t\t\tcontains the Psd\n\t\t" '\n\t\tThe Pdf is evaluated only within LowCutoff and HoghCutoff\n\t\tIf the Pdf is PsdFuns.Interp, then LowCutoff and HighCutoff are\n\t\tautomatically set to min and max values of the experimental data\n\t\t' StrMessage = def GetInRange(fAll, LowCutoff, HighCutoff): _tmpa = (fAll >= LowCutoff) _tmpb = (fAll <= HighCutoff) fMid_Pos = np.all([_tmpa, _tmpb], 0) fMid = fAll[fMid_Pos] return (fMid_Pos, fMid) (LowCutoff, HighCutoff) = CutoffLowHigh fMin = 0 fMax = ((N - 1) * df) fAll = np.linspace(0, fMax, N) yPsdAll = (fAll * 0) LowCutoff = (0 if (LowCutoff is None) else LowCutoff) HighCutoff = ((N * df) if (HighCutoff is None) else HighCutoff) if (self.PsdType == PsdFuns.Interp): if (self.Options.FIT_NUMERIC_DATA_WITH_POWER_LAW == True): (xFreq, y) = self.NumericPsdGetXY() p = FitPowerLaw((1 / xFreq), y) _PsdParams = (p[0], (- p[1])) LowCutoff = np.amin(self._PsdNumericX) HighCutoff = np.amin(self._PsdNumericX) (fMid_Pos, fMid) = GetInRange(fAll, LowCutoff, HighCutoff) yPsd = PsdFuns.PowerLaw(fMid, *_PsdParams) else: LowVal = np.amin(self._PsdNumericX) HighVal = np.amax(self._PsdNumericX) LowCutoff = (LowVal if (LowCutoff <= LowVal) else LowCutoff) HighCutoff = (HighVal if (HighCutoff >= HighVal) else HighCutoff) (fMid_Pos, fMid) = GetInRange(fAll, LowCutoff, HighCutoff) yPsd = PsdFuns.Interp(fMid, 
self._PsdNumericX, self._PsdNumericY) else: (fMid_Pos, fMid) = GetInRange(fAll, LowCutoff, HighCutoff) yPsd = self.PsdType(fMid, *self.PsdParams) yPsdAll[fMid_Pos] = yPsd return (fAll, yPsdAll)
def MakeProfile(self, L, N): '\n\t\t\tEvaluates the psd according to .PsdType, .PsdParams and .Options directives\n\t\t\tReturns an evenly-spaced array.\n\t\t\tIf PsdType = NumericArray, linear interpolation is performed.\n\n\t\t\t:PARAM: N: # of samples\n\t\t\t:PARAM: dx: grid spacing (spatial frequency)\n\n\t\t\treturns:\n\t\t\t\t1d arr\n\t\t' if (self.PsdType == PsdFuns.Interp): L_mm = (L * 1000.0) yRoughness = PsdArray2Noise_1d_v2(self._PsdNumericX, self._PsdNumericY, L_mm, N) else: print('Irreversible error. The code was not completed to handle this instance') return (yRoughness * self.ProfileScaling)
7,969,343,420,991,885,000
Evaluates the psd according to .PsdType, .PsdParams and .Options directives Returns an evenly-spaced array. If PsdType = NumericArray, linear interpolation is performed. :PARAM: N: # of samples :PARAM: dx: grid spacing (spatial frequency) returns: 1d arr
wiselib2/Noise.py
MakeProfile
WISE-Project/wiselib2
python
def MakeProfile(self, L, N): '\n\t\t\tEvaluates the psd according to .PsdType, .PsdParams and .Options directives\n\t\t\tReturns an evenly-spaced array.\n\t\t\tIf PsdType = NumericArray, linear interpolation is performed.\n\n\t\t\t:PARAM: N: # of samples\n\t\t\t:PARAM: dx: grid spacing (spatial frequency)\n\n\t\t\treturns:\n\t\t\t\t1d arr\n\t\t' if (self.PsdType == PsdFuns.Interp): L_mm = (L * 1000.0) yRoughness = PsdArray2Noise_1d_v2(self._PsdNumericX, self._PsdNumericY, L_mm, N) else: print('Irreversible error. The code was not completed to handle this instance') return (yRoughness * self.ProfileScaling)
def NumericPsdLoadXY(self, FilePath, xScaling=1, yScaling=1, xIsSpatialFreq=True): ' @TODO: specificare formati e tipi di file\n\n\t\tParameters\n\t\t----------------------------\n\t\txIsSpatialFreq : bool\n\t\t\t\t\t\ttrue If the first column (Read_x_values) contains spatial\n\t\t\t\t\t\tfrequencies. False if it contains lenghts. Default = True\n\t\txScaling, yScaling: floats\n\t\t\t\t\t\tRead_x_values => Read_x_values * xScaling\n\n\t\t\t\t\t\tRead_y_values => Read_y_values * yScaling\n\n\t\t\t\t\t\tSometimes, properly setting the x and y scaling values may be confusing (although just matter of high-school considerations). On this purpose, the property .RoughnessMaker.ProfileScaling property can be used also..ProfileScaling is the scale factor that acts on the output of MakeProfile() function only.\n\n\t\tremarks\n\t\t\t\t\t\t--------\n\t\t\t\t\t\tpippo\n\n\t\t' try: self._IsNumericPsdInFreq = xIsSpatialFreq s = np.loadtxt(FilePath) x = s[:, 0] y = s[:, 1] x = (x * xScaling) y = (y * yScaling) if (xIsSpatialFreq == False): f = (1 / x) else: f = x i = np.argsort(f) f = f[i] y = y[i] self.PsdCutoffLowHigh = [np.amin, np.amax(f)] self.PsdType = PsdFuns.Interp self.PsdParams = [f, y] self.NumericPsdSetXY(f, y) except: pass def Generate(self, N=None, dx=None, CutoffLowHigh=[None, None]): '\n\t\t\tParameters\n\t\t\t\tN: # of output samples\n\t\t\t\tdx: step of the x axis\n\t\t\tNote: generates an evenly spaced array\n\t\t\t' L = (dx * N) df = (1 / L) (fPsd, yPsd) = self.PsdEval(((N // 2) + 1), df=df, CutoffLowHigh=CutoffLowHigh) h = Psd2Noise_1d(yPsd, Semiaxis=True) return h
7,687,502,643,909,254,000
@TODO: specificare formati e tipi di file Parameters ---------------------------- xIsSpatialFreq : bool true If the first column (Read_x_values) contains spatial frequencies. False if it contains lenghts. Default = True xScaling, yScaling: floats Read_x_values => Read_x_values * xScaling Read_y_values => Read_y_values * yScaling Sometimes, properly setting the x and y scaling values may be confusing (although just matter of high-school considerations). On this purpose, the property .RoughnessMaker.ProfileScaling property can be used also..ProfileScaling is the scale factor that acts on the output of MakeProfile() function only. remarks -------- pippo
wiselib2/Noise.py
NumericPsdLoadXY
WISE-Project/wiselib2
python
def NumericPsdLoadXY(self, FilePath, xScaling=1, yScaling=1, xIsSpatialFreq=True): ' @TODO: specificare formati e tipi di file\n\n\t\tParameters\n\t\t----------------------------\n\t\txIsSpatialFreq : bool\n\t\t\t\t\t\ttrue If the first column (Read_x_values) contains spatial\n\t\t\t\t\t\tfrequencies. False if it contains lenghts. Default = True\n\t\txScaling, yScaling: floats\n\t\t\t\t\t\tRead_x_values => Read_x_values * xScaling\n\n\t\t\t\t\t\tRead_y_values => Read_y_values * yScaling\n\n\t\t\t\t\t\tSometimes, properly setting the x and y scaling values may be confusing (although just matter of high-school considerations). On this purpose, the property .RoughnessMaker.ProfileScaling property can be used also..ProfileScaling is the scale factor that acts on the output of MakeProfile() function only.\n\n\t\tremarks\n\t\t\t\t\t\t--------\n\t\t\t\t\t\tpippo\n\n\t\t' try: self._IsNumericPsdInFreq = xIsSpatialFreq s = np.loadtxt(FilePath) x = s[:, 0] y = s[:, 1] x = (x * xScaling) y = (y * yScaling) if (xIsSpatialFreq == False): f = (1 / x) else: f = x i = np.argsort(f) f = f[i] y = y[i] self.PsdCutoffLowHigh = [np.amin, np.amax(f)] self.PsdType = PsdFuns.Interp self.PsdParams = [f, y] self.NumericPsdSetXY(f, y) except: pass def Generate(self, N=None, dx=None, CutoffLowHigh=[None, None]): '\n\t\t\tParameters\n\t\t\t\tN: # of output samples\n\t\t\t\tdx: step of the x axis\n\t\t\tNote: generates an evenly spaced array\n\t\t\t' L = (dx * N) df = (1 / L) (fPsd, yPsd) = self.PsdEval(((N // 2) + 1), df=df, CutoffLowHigh=CutoffLowHigh) h = Psd2Noise_1d(yPsd, Semiaxis=True) return h
def Generate(self, N=None, dx=None, CutoffLowHigh=[None, None]): '\n\t\t\tParameters\n\t\t\t\tN: # of output samples\n\t\t\t\tdx: step of the x axis\n\t\t\tNote: generates an evenly spaced array\n\t\t\t' L = (dx * N) df = (1 / L) (fPsd, yPsd) = self.PsdEval(((N // 2) + 1), df=df, CutoffLowHigh=CutoffLowHigh) h = Psd2Noise_1d(yPsd, Semiaxis=True) return h
-6,980,789,987,062,936,000
Parameters N: # of output samples dx: step of the x axis Note: generates an evenly spaced array
wiselib2/Noise.py
Generate
WISE-Project/wiselib2
python
def Generate(self, N=None, dx=None, CutoffLowHigh=[None, None]): '\n\t\t\tParameters\n\t\t\t\tN: # of output samples\n\t\t\t\tdx: step of the x axis\n\t\t\tNote: generates an evenly spaced array\n\t\t\t' L = (dx * N) df = (1 / L) (fPsd, yPsd) = self.PsdEval(((N // 2) + 1), df=df, CutoffLowHigh=CutoffLowHigh) h = Psd2Noise_1d(yPsd, Semiaxis=True) return h
def test_values(self): "Checks that printing Values uses the GTDKeyFormatter instead of gtsam's default" v = gtd.Values() gtd.InsertJointAngle(v, 0, 1, 2) self.assertTrue(('q(0)1' in v.__repr__()))
-3,425,055,309,815,927,300
Checks that printing Values uses the GTDKeyFormatter instead of gtsam's default
python/tests/test_print.py
test_values
borglab/GTDynamics
python
def test_values(self): v = gtd.Values() gtd.InsertJointAngle(v, 0, 1, 2) self.assertTrue(('q(0)1' in v.__repr__()))
def test_nonlinear_factor_graph(self): 'Checks that printing NonlinearFactorGraph uses the GTDKeyFormatter' fg = gtd.NonlinearFactorGraph() fg.push_back(gtd.MinTorqueFactor(gtd.TorqueKey(0, 0).key(), gtsam.noiseModel.Unit.Create(1))) self.assertTrue(('T(0)0' in fg.__repr__()))
-2,596,791,198,090,200,000
Checks that printing NonlinearFactorGraph uses the GTDKeyFormatter
python/tests/test_print.py
test_nonlinear_factor_graph
borglab/GTDynamics
python
def test_nonlinear_factor_graph(self): fg = gtd.NonlinearFactorGraph() fg.push_back(gtd.MinTorqueFactor(gtd.TorqueKey(0, 0).key(), gtsam.noiseModel.Unit.Create(1))) self.assertTrue(('T(0)0' in fg.__repr__()))
def test_key_formatter(self): 'Tests print method with various key formatters' torqueKey = gtd.TorqueKey(0, 0).key() factor = gtd.MinTorqueFactor(torqueKey, gtsam.noiseModel.Unit.Create(1)) with patch('sys.stdout', new=StringIO()) as fake_out: factor.print('factor: ', gtd.GTDKeyFormatter) self.assertTrue(('factor: min torque factor' in fake_out.getvalue())) self.assertTrue(('keys = { T(0)0 }' in fake_out.getvalue())) def myKeyFormatter(key): return 'this is my key formatter {}'.format(key) with patch('sys.stdout', new=StringIO()) as fake_out: factor.print('factor: ', myKeyFormatter) self.assertTrue(('factor: min torque factor' in fake_out.getvalue())) self.assertTrue(('keys = {{ this is my key formatter {} }}'.format(torqueKey) in fake_out.getvalue()))
5,035,082,158,590,876,000
Tests print method with various key formatters
python/tests/test_print.py
test_key_formatter
borglab/GTDynamics
python
def test_key_formatter(self): torqueKey = gtd.TorqueKey(0, 0).key() factor = gtd.MinTorqueFactor(torqueKey, gtsam.noiseModel.Unit.Create(1)) with patch('sys.stdout', new=StringIO()) as fake_out: factor.print('factor: ', gtd.GTDKeyFormatter) self.assertTrue(('factor: min torque factor' in fake_out.getvalue())) self.assertTrue(('keys = { T(0)0 }' in fake_out.getvalue())) def myKeyFormatter(key): return 'this is my key formatter {}'.format(key) with patch('sys.stdout', new=StringIO()) as fake_out: factor.print('factor: ', myKeyFormatter) self.assertTrue(('factor: min torque factor' in fake_out.getvalue())) self.assertTrue(('keys = {{ this is my key formatter {} }}'.format(torqueKey) in fake_out.getvalue()))
def sample(self): ' Returns a array with one sample from each discrete action space ' np_random = np.random.RandomState() random_array = np_random.rand(self.num_discrete_space) return [int(x) for x in np.floor((np.multiply(((self.high - self.low) + 1.0), random_array) + self.low))]
-7,685,226,046,512,190,000
Returns a array with one sample from each discrete action space
multiagent/multi_discrete.py
sample
51N84D/multiagent-particle-envs
python
def sample(self): ' ' np_random = np.random.RandomState() random_array = np_random.rand(self.num_discrete_space) return [int(x) for x in np.floor((np.multiply(((self.high - self.low) + 1.0), random_array) + self.low))]
def get_minimum_version_all_cells(context, binaries, require_all=False): "Get the minimum service version, checking all cells.\n\n This attempts to calculate the minimum service version for a set\n of binaries across all the cells in the system. If require_all\n is False, then any cells that fail to report a version will be\n ignored (assuming they won't be candidates for scheduling and thus\n excluding them from the minimum version calculation is reasonable).\n If require_all is True, then a failing cell will cause this to raise\n exception.CellTimeout, as would be appropriate for gating some\n data migration until everything is new enough.\n\n Note that services that do not report a positive version are excluded\n from this, as it crosses all cells which will naturally not have all\n services.\n " if (not all((binary.startswith('nova-') for binary in binaries))): LOG.warning("get_minimum_version_all_cells called with likely-incorrect binaries `%s'", ','.join(binaries)) raise exception.ObjectActionError(action='get_minimum_version_all_cells', reason='Invalid binary prefix') results = nova_context.scatter_gather_all_cells(context, Service._db_service_get_minimum_version, binaries) min_version = None for (cell_uuid, result) in results.items(): if (result is nova_context.did_not_respond_sentinel): LOG.warning('Cell %s did not respond when getting minimum service version', cell_uuid) if require_all: raise exception.CellTimeout() elif (result is nova_context.raised_exception_sentinel): LOG.warning('Failed to get minimum service version for cell %s', cell_uuid) if require_all: raise exception.CellTimeout() else: relevant_versions = [version for version in result.values() if version] if relevant_versions: min_version_cell = min(relevant_versions) min_version = (min(min_version, min_version_cell) if min_version else min_version_cell) return (min_version or 0)
-6,785,959,881,574,487,000
Get the minimum service version, checking all cells. This attempts to calculate the minimum service version for a set of binaries across all the cells in the system. If require_all is False, then any cells that fail to report a version will be ignored (assuming they won't be candidates for scheduling and thus excluding them from the minimum version calculation is reasonable). If require_all is True, then a failing cell will cause this to raise exception.CellTimeout, as would be appropriate for gating some data migration until everything is new enough. Note that services that do not report a positive version are excluded from this, as it crosses all cells which will naturally not have all services.
nova/objects/service.py
get_minimum_version_all_cells
bopopescu/TestNova
python
def get_minimum_version_all_cells(context, binaries, require_all=False): "Get the minimum service version, checking all cells.\n\n This attempts to calculate the minimum service version for a set\n of binaries across all the cells in the system. If require_all\n is False, then any cells that fail to report a version will be\n ignored (assuming they won't be candidates for scheduling and thus\n excluding them from the minimum version calculation is reasonable).\n If require_all is True, then a failing cell will cause this to raise\n exception.CellTimeout, as would be appropriate for gating some\n data migration until everything is new enough.\n\n Note that services that do not report a positive version are excluded\n from this, as it crosses all cells which will naturally not have all\n services.\n " if (not all((binary.startswith('nova-') for binary in binaries))): LOG.warning("get_minimum_version_all_cells called with likely-incorrect binaries `%s'", ','.join(binaries)) raise exception.ObjectActionError(action='get_minimum_version_all_cells', reason='Invalid binary prefix') results = nova_context.scatter_gather_all_cells(context, Service._db_service_get_minimum_version, binaries) min_version = None for (cell_uuid, result) in results.items(): if (result is nova_context.did_not_respond_sentinel): LOG.warning('Cell %s did not respond when getting minimum service version', cell_uuid) if require_all: raise exception.CellTimeout() elif (result is nova_context.raised_exception_sentinel): LOG.warning('Failed to get minimum service version for cell %s', cell_uuid) if require_all: raise exception.CellTimeout() else: relevant_versions = [version for version in result.values() if version] if relevant_versions: min_version_cell = min(relevant_versions) min_version = (min(min_version, min_version_cell) if min_version else min_version_cell) return (min_version or 0)
def _check_minimum_version(self): "Enforce that we are not older that the minimum version.\n\n This is a loose check to avoid creating or updating our service\n record if we would do so with a version that is older that the current\n minimum of all services. This could happen if we were started with\n older code by accident, either due to a rollback or an old and\n un-updated node suddenly coming back onto the network.\n\n There is technically a race here between the check and the update,\n but since the minimum version should always roll forward and never\n backwards, we don't need to worry about doing it atomically. Further,\n the consequence for getting this wrong is minor, in that we'll just\n fail to send messages that other services understand.\n " if (not self.obj_attr_is_set('version')): return if (not self.obj_attr_is_set('binary')): return minver = self.get_minimum_version(self._context, self.binary) if (minver > self.version): raise exception.ServiceTooOld(thisver=self.version, minver=minver)
-24,800,499,018,735,096
Enforce that we are not older that the minimum version. This is a loose check to avoid creating or updating our service record if we would do so with a version that is older that the current minimum of all services. This could happen if we were started with older code by accident, either due to a rollback or an old and un-updated node suddenly coming back onto the network. There is technically a race here between the check and the update, but since the minimum version should always roll forward and never backwards, we don't need to worry about doing it atomically. Further, the consequence for getting this wrong is minor, in that we'll just fail to send messages that other services understand.
nova/objects/service.py
_check_minimum_version
bopopescu/TestNova
python
def _check_minimum_version(self): "Enforce that we are not older that the minimum version.\n\n This is a loose check to avoid creating or updating our service\n record if we would do so with a version that is older that the current\n minimum of all services. This could happen if we were started with\n older code by accident, either due to a rollback or an old and\n un-updated node suddenly coming back onto the network.\n\n There is technically a race here between the check and the update,\n but since the minimum version should always roll forward and never\n backwards, we don't need to worry about doing it atomically. Further,\n the consequence for getting this wrong is minor, in that we'll just\n fail to send messages that other services understand.\n " if (not self.obj_attr_is_set('version')): return if (not self.obj_attr_is_set('binary')): return minver = self.get_minimum_version(self._context, self.binary) if (minver > self.version): raise exception.ServiceTooOld(thisver=self.version, minver=minver)
def _fetch_art(self, should_exist): "Execute the fetch_art coroutine for the task and return the\n album's resulting artpath. ``should_exist`` specifies whether to\n assert that art path was set (to the correct value) or or that\n the path was not set.\n " self.plugin.fetch_art(self.session, self.task) self.plugin.assign_art(self.session, self.task) artpath = self.lib.albums()[0].artpath if should_exist: self.assertEqual(artpath, os.path.join(os.path.dirname(self.i.path), 'cover.jpg')) self.assertExists(artpath) else: self.assertEqual(artpath, None) return artpath
8,313,379,815,369,596,000
Execute the fetch_art coroutine for the task and return the album's resulting artpath. ``should_exist`` specifies whether to assert that art path was set (to the correct value) or or that the path was not set.
test/test_art.py
_fetch_art
parapente/beets
python
def _fetch_art(self, should_exist): "Execute the fetch_art coroutine for the task and return the\n album's resulting artpath. ``should_exist`` specifies whether to\n assert that art path was set (to the correct value) or or that\n the path was not set.\n " self.plugin.fetch_art(self.session, self.task) self.plugin.assign_art(self.session, self.task) artpath = self.lib.albums()[0].artpath if should_exist: self.assertEqual(artpath, os.path.join(os.path.dirname(self.i.path), 'cover.jpg')) self.assertExists(artpath) else: self.assertEqual(artpath, None) return artpath
def _require_backend(self): "Skip the test if the art resizer doesn't have ImageMagick or\n PIL (so comparisons and measurements are unavailable).\n " if (ArtResizer.shared.method[0] == WEBPROXY): self.skipTest('ArtResizer has no local imaging backend available')
-2,401,322,291,639,770,000
Skip the test if the art resizer doesn't have ImageMagick or PIL (so comparisons and measurements are unavailable).
test/test_art.py
_require_backend
parapente/beets
python
def _require_backend(self): "Skip the test if the art resizer doesn't have ImageMagick or\n PIL (so comparisons and measurements are unavailable).\n " if (ArtResizer.shared.method[0] == WEBPROXY): self.skipTest('ArtResizer has no local imaging backend available')
def _TwoArgs(w_parser): 'Returns an expression tree to be evaluated.' w0 = w_parser.Read() w1 = w_parser.Read() s0 = w0.s if (s0 == '!'): return bool_expr.LogicalNot(bool_expr.WordTest(w1)) unary_id = Id.Undefined_Tok if w0.s.startswith('--'): if (s0 == '--dir'): unary_id = Id.BoolUnary_d elif (s0 == '--exists'): unary_id = Id.BoolUnary_e elif (s0 == '--file'): unary_id = Id.BoolUnary_f elif (s0 == '--symlink'): unary_id = Id.BoolUnary_L if (unary_id == Id.Undefined_Tok): unary_id = match.BracketUnary(w0.s) if (unary_id == Id.Undefined_Tok): p_die('Expected unary operator, got %r (2 args)', w0.s, word=w0) return bool_expr.Unary(unary_id, w1)
1,459,597,716,925,286,000
Returns an expression tree to be evaluated.
osh/builtin_bracket.py
_TwoArgs
Schweinepriester/oil
python
def _TwoArgs(w_parser): w0 = w_parser.Read() w1 = w_parser.Read() s0 = w0.s if (s0 == '!'): return bool_expr.LogicalNot(bool_expr.WordTest(w1)) unary_id = Id.Undefined_Tok if w0.s.startswith('--'): if (s0 == '--dir'): unary_id = Id.BoolUnary_d elif (s0 == '--exists'): unary_id = Id.BoolUnary_e elif (s0 == '--file'): unary_id = Id.BoolUnary_f elif (s0 == '--symlink'): unary_id = Id.BoolUnary_L if (unary_id == Id.Undefined_Tok): unary_id = match.BracketUnary(w0.s) if (unary_id == Id.Undefined_Tok): p_die('Expected unary operator, got %r (2 args)', w0.s, word=w0) return bool_expr.Unary(unary_id, w1)
def _ThreeArgs(w_parser): 'Returns an expression tree to be evaluated.' w0 = w_parser.Read() w1 = w_parser.Read() w2 = w_parser.Read() binary_id = match.BracketBinary(w1.s) if (binary_id != Id.Undefined_Tok): return bool_expr.Binary(binary_id, w0, w2) if (w1.s == '-a'): return bool_expr.LogicalAnd(bool_expr.WordTest(w0), bool_expr.WordTest(w2)) if (w1.s == '-o'): return bool_expr.LogicalOr(bool_expr.WordTest(w0), bool_expr.WordTest(w2)) if (w0.s == '!'): w_parser.Rewind(2) child = _TwoArgs(w_parser) return bool_expr.LogicalNot(child) if ((w0.s == '(') and (w2.s == ')')): return bool_expr.WordTest(w1) p_die('Expected binary operator, got %r (3 args)', w1.s, word=w1)
-3,653,633,521,888,061,000
Returns an expression tree to be evaluated.
osh/builtin_bracket.py
_ThreeArgs
Schweinepriester/oil
python
def _ThreeArgs(w_parser): w0 = w_parser.Read() w1 = w_parser.Read() w2 = w_parser.Read() binary_id = match.BracketBinary(w1.s) if (binary_id != Id.Undefined_Tok): return bool_expr.Binary(binary_id, w0, w2) if (w1.s == '-a'): return bool_expr.LogicalAnd(bool_expr.WordTest(w0), bool_expr.WordTest(w2)) if (w1.s == '-o'): return bool_expr.LogicalOr(bool_expr.WordTest(w0), bool_expr.WordTest(w2)) if (w0.s == '!'): w_parser.Rewind(2) child = _TwoArgs(w_parser) return bool_expr.LogicalNot(child) if ((w0.s == '(') and (w2.s == ')')): return bool_expr.WordTest(w1) p_die('Expected binary operator, got %r (3 args)', w1.s, word=w1)
def ReadWord(self, unused_lex_mode): 'Interface for bool_parse.py.\n\n TODO: This should probably be word_t\n ' if (self.i == self.n): w = word.String(Id.Eof_Real, '', runtime.NO_SPID) return w s = self.cmd_val.argv[self.i] left_spid = self.cmd_val.arg_spids[self.i] self.i += 1 id_ = match.BracketUnary(s) if (id_ == Id.Undefined_Tok): id_ = match.BracketBinary(s) if (id_ == Id.Undefined_Tok): id_ = match.BracketOther(s) if (id_ == Id.Undefined_Tok): id_ = Id.Word_Compound w = word.String(id_, s, left_spid) return w
-3,903,152,664,439,137,000
Interface for bool_parse.py. TODO: This should probably be word_t
osh/builtin_bracket.py
ReadWord
Schweinepriester/oil
python
def ReadWord(self, unused_lex_mode): 'Interface for bool_parse.py.\n\n TODO: This should probably be word_t\n ' if (self.i == self.n): w = word.String(Id.Eof_Real, , runtime.NO_SPID) return w s = self.cmd_val.argv[self.i] left_spid = self.cmd_val.arg_spids[self.i] self.i += 1 id_ = match.BracketUnary(s) if (id_ == Id.Undefined_Tok): id_ = match.BracketBinary(s) if (id_ == Id.Undefined_Tok): id_ = match.BracketOther(s) if (id_ == Id.Undefined_Tok): id_ = Id.Word_Compound w = word.String(id_, s, left_spid) return w
def Read(self): 'Interface used for special cases below.' return self.ReadWord(lex_mode_e.ShCommand)
6,585,227,498,377,012,000
Interface used for special cases below.
osh/builtin_bracket.py
Read
Schweinepriester/oil
python
def Read(self): return self.ReadWord(lex_mode_e.ShCommand)
def Peek(self, offset): 'For special cases.' return self.cmd_val.argv[(self.i + offset)]
-4,502,824,355,368,007,000
For special cases.
osh/builtin_bracket.py
Peek
Schweinepriester/oil
python
def Peek(self, offset): return self.cmd_val.argv[(self.i + offset)]
def Rewind(self, offset): 'For special cases.' self.i -= offset
4,830,300,084,656,957,000
For special cases.
osh/builtin_bracket.py
Rewind
Schweinepriester/oil
python
def Rewind(self, offset): self.i -= offset
def Run(self, cmd_val): 'The test/[ builtin.\n\n The only difference between test and [ is that [ needs a matching ].\n ' if self.need_right_bracket: if self.exec_opts.simple_test_builtin(): e_usage("should be invoked as 'test' (simple_test_builtin)") strs = cmd_val.argv if ((not strs) or (strs[(- 1)] != ']')): self.errfmt.Print_('missing closing ]', span_id=cmd_val.arg_spids[0]) return 2 cmd_val.argv.pop() cmd_val.arg_spids.pop() w_parser = _StringWordEmitter(cmd_val) w_parser.Read() b_parser = bool_parse.BoolParser(w_parser) bool_node = None n = (len(cmd_val.argv) - 1) if (self.exec_opts.simple_test_builtin() and (n > 3)): e_usage('should only have 3 arguments or fewer (simple_test_builtin)') try: if (n == 0): return 1 elif (n == 1): w = w_parser.Read() bool_node = bool_expr.WordTest(w) elif (n == 2): bool_node = _TwoArgs(w_parser) elif (n == 3): bool_node = _ThreeArgs(w_parser) if (n == 4): a0 = w_parser.Peek(0) if (a0 == '!'): w_parser.Read() child = _ThreeArgs(w_parser) bool_node = bool_expr.LogicalNot(child) elif ((a0 == '(') and (w_parser.Peek(3) == ')')): w_parser.Read() bool_node = _TwoArgs(w_parser) else: pass if (bool_node is None): bool_node = b_parser.ParseForBuiltin() except error.Parse as e: self.errfmt.PrettyPrintError(e, prefix='(test) ') return 2 word_ev = _WordEvaluator() bool_ev = sh_expr_eval.BoolEvaluator(self.mem, self.exec_opts, None, self.errfmt) bool_ev.Init_AlwaysStrict() bool_ev.word_ev = word_ev bool_ev.CheckCircularDeps() try: b = bool_ev.EvalB(bool_node) except error._ErrorWithLocation as e: self.errfmt.PrettyPrintError(e, prefix='(test) ') return 2 status = (0 if b else 1) return status
-3,654,161,641,829,040,000
The test/[ builtin. The only difference between test and [ is that [ needs a matching ].
osh/builtin_bracket.py
Run
Schweinepriester/oil
python
def Run(self, cmd_val): 'The test/[ builtin.\n\n The only difference between test and [ is that [ needs a matching ].\n ' if self.need_right_bracket: if self.exec_opts.simple_test_builtin(): e_usage("should be invoked as 'test' (simple_test_builtin)") strs = cmd_val.argv if ((not strs) or (strs[(- 1)] != ']')): self.errfmt.Print_('missing closing ]', span_id=cmd_val.arg_spids[0]) return 2 cmd_val.argv.pop() cmd_val.arg_spids.pop() w_parser = _StringWordEmitter(cmd_val) w_parser.Read() b_parser = bool_parse.BoolParser(w_parser) bool_node = None n = (len(cmd_val.argv) - 1) if (self.exec_opts.simple_test_builtin() and (n > 3)): e_usage('should only have 3 arguments or fewer (simple_test_builtin)') try: if (n == 0): return 1 elif (n == 1): w = w_parser.Read() bool_node = bool_expr.WordTest(w) elif (n == 2): bool_node = _TwoArgs(w_parser) elif (n == 3): bool_node = _ThreeArgs(w_parser) if (n == 4): a0 = w_parser.Peek(0) if (a0 == '!'): w_parser.Read() child = _ThreeArgs(w_parser) bool_node = bool_expr.LogicalNot(child) elif ((a0 == '(') and (w_parser.Peek(3) == ')')): w_parser.Read() bool_node = _TwoArgs(w_parser) else: pass if (bool_node is None): bool_node = b_parser.ParseForBuiltin() except error.Parse as e: self.errfmt.PrettyPrintError(e, prefix='(test) ') return 2 word_ev = _WordEvaluator() bool_ev = sh_expr_eval.BoolEvaluator(self.mem, self.exec_opts, None, self.errfmt) bool_ev.Init_AlwaysStrict() bool_ev.word_ev = word_ev bool_ev.CheckCircularDeps() try: b = bool_ev.EvalB(bool_node) except error._ErrorWithLocation as e: self.errfmt.PrettyPrintError(e, prefix='(test) ') return 2 status = (0 if b else 1) return status
def main(): ' parse command line options and run commands.\n ' parser = argparse.ArgumentParser(description='A web crawler for testing website links validation.') parser.add_argument('-V', '--version', dest='version', action='store_true', help='show version') parser.add_argument('--log-level', default='INFO', help='Specify logging level, default is INFO.') parser.add_argument('--config-file', help='Specify config file path.') parser.add_argument('--seeds', default='http://debugtalk.com', help='Specify crawl seed url(s), several urls can be specified with pipe; if auth needed, seeds can be specified like user1:pwd1@url1|user2:pwd2@url2') parser.add_argument('--include-hosts', help='Specify extra hosts to be crawled.') parser.add_argument('--cookies', help="Specify cookies, several cookies can be joined by '|'. e.g. 'lang:en,country:us|lang:zh,country:cn'") parser.add_argument('--crawl-mode', default='BFS', help='Specify crawl mode, BFS or DFS.') parser.add_argument('--max-depth', default=5, type=int, help='Specify max crawl depth.') parser.add_argument('--concurrency', help='Specify concurrent workers number.') parser.add_argument('--save-results', default='NO', help='Specify if save results, default is NO.') parser.add_argument('--grey-user-agent', help='Specify grey environment header User-Agent.') parser.add_argument('--grey-traceid', help='Specify grey environment cookie traceid.') parser.add_argument('--grey-view-grey', help='Specify grey environment cookie view_gray.') try: from jenkins_mail_py import MailgunHelper mailer = MailgunHelper(parser) except ImportError: mailer = None args = parser.parse_args() if args.version: print('WebCrawler version: {}'.format(__version__)) exit(0) log_level = getattr(logging, args.log_level.upper()) logging.basicConfig(level=log_level) color_logging(('args: %s' % args)) main_crawler(args, mailer)
-7,323,592,258,720,708,000
parse command line options and run commands.
webcrawler/__init__.py
main
debugtalk/WebCrawler
python
def main(): ' \n ' parser = argparse.ArgumentParser(description='A web crawler for testing website links validation.') parser.add_argument('-V', '--version', dest='version', action='store_true', help='show version') parser.add_argument('--log-level', default='INFO', help='Specify logging level, default is INFO.') parser.add_argument('--config-file', help='Specify config file path.') parser.add_argument('--seeds', default='http://debugtalk.com', help='Specify crawl seed url(s), several urls can be specified with pipe; if auth needed, seeds can be specified like user1:pwd1@url1|user2:pwd2@url2') parser.add_argument('--include-hosts', help='Specify extra hosts to be crawled.') parser.add_argument('--cookies', help="Specify cookies, several cookies can be joined by '|'. e.g. 'lang:en,country:us|lang:zh,country:cn'") parser.add_argument('--crawl-mode', default='BFS', help='Specify crawl mode, BFS or DFS.') parser.add_argument('--max-depth', default=5, type=int, help='Specify max crawl depth.') parser.add_argument('--concurrency', help='Specify concurrent workers number.') parser.add_argument('--save-results', default='NO', help='Specify if save results, default is NO.') parser.add_argument('--grey-user-agent', help='Specify grey environment header User-Agent.') parser.add_argument('--grey-traceid', help='Specify grey environment cookie traceid.') parser.add_argument('--grey-view-grey', help='Specify grey environment cookie view_gray.') try: from jenkins_mail_py import MailgunHelper mailer = MailgunHelper(parser) except ImportError: mailer = None args = parser.parse_args() if args.version: print('WebCrawler version: {}'.format(__version__)) exit(0) log_level = getattr(logging, args.log_level.upper()) logging.basicConfig(level=log_level) color_logging(('args: %s' % args)) main_crawler(args, mailer)
def init(data): '\n Initialize grid and update cell values\n\n\n @param data -> Json response from bottle\n\n @return game_id -> Game id for debuggin purposes when displaying grid\n\n @return grid -> Grid with updated cell values\n\n @return food -> Sorted array of food by closest to charlie\n\n @return charlie -> My snake\n\n @return enemies -> Array of all enemy snakes\n\n @return check_food -> Secondary grid to look ahead when eating food\n ' food = [] enemies = [] grid = GameBoard(data['board']['height'], data['board']['width']) check_food = GameBoard(data['board']['height'], data['board']['width']) charlie = Snake(data['you']) for i in data['board']['food']: food.append([i['x'], i['y']]) grid.set_cell([i['x'], i['y']], FOOD) check_food.set_cell([i['x'], i['y']], FOOD) for snake in data['board']['snakes']: snake = Snake(snake) for coord in snake.coords: grid.set_cell(coord, SNAKE) check_food.set_cell(coord, SNAKE) if ((snake.health < 100) and (snake.length > 2) and (data['turn'] >= 3)): grid.set_cell(snake.tail, SAFTEY) check_food.set_cell(snake.tail, SAFTEY) if (snake.id != charlie.id): for neighbour in neighbours(snake.head, grid, 0, snake.coords, [1]): if (snake.length >= charlie.length): grid.set_cell(neighbour, DANGER) check_food.set_cell(neighbour, DANGER) enemies.append(snake) food = sorted(food, key=(lambda p: distance(p, charlie.head))) game_id = data['game']['id'] return (game_id, grid, food, charlie, enemies, check_food)
-1,022,715,409,479,971,600
Initialize grid and update cell values @param data -> Json response from bottle @return game_id -> Game id for debuggin purposes when displaying grid @return grid -> Grid with updated cell values @return food -> Sorted array of food by closest to charlie @return charlie -> My snake @return enemies -> Array of all enemy snakes @return check_food -> Secondary grid to look ahead when eating food
app/main.py
init
ntmk/battlesnake-2019-pixelated
python
def init(data): '\n Initialize grid and update cell values\n\n\n @param data -> Json response from bottle\n\n @return game_id -> Game id for debuggin purposes when displaying grid\n\n @return grid -> Grid with updated cell values\n\n @return food -> Sorted array of food by closest to charlie\n\n @return charlie -> My snake\n\n @return enemies -> Array of all enemy snakes\n\n @return check_food -> Secondary grid to look ahead when eating food\n ' food = [] enemies = [] grid = GameBoard(data['board']['height'], data['board']['width']) check_food = GameBoard(data['board']['height'], data['board']['width']) charlie = Snake(data['you']) for i in data['board']['food']: food.append([i['x'], i['y']]) grid.set_cell([i['x'], i['y']], FOOD) check_food.set_cell([i['x'], i['y']], FOOD) for snake in data['board']['snakes']: snake = Snake(snake) for coord in snake.coords: grid.set_cell(coord, SNAKE) check_food.set_cell(coord, SNAKE) if ((snake.health < 100) and (snake.length > 2) and (data['turn'] >= 3)): grid.set_cell(snake.tail, SAFTEY) check_food.set_cell(snake.tail, SAFTEY) if (snake.id != charlie.id): for neighbour in neighbours(snake.head, grid, 0, snake.coords, [1]): if (snake.length >= charlie.length): grid.set_cell(neighbour, DANGER) check_food.set_cell(neighbour, DANGER) enemies.append(snake) food = sorted(food, key=(lambda p: distance(p, charlie.head))) game_id = data['game']['id'] return (game_id, grid, food, charlie, enemies, check_food)
def __init__(self): '\n The constructor of the ProcedureParams class\n ' self.paramsDict = {}
-882,860,014,774,851,600
The constructor of the ProcedureParams class
QCompute/QuantumPlatform/ProcedureParams.py
__init__
rickyHong/Qcompute-repl
python
def __init__(self): '\n \n ' self.paramsDict = {}
def __getitem__(self, index): '\n Get the procedure params according to the index.\n\n Create the register when it does not exist.\n\n :param index:\n :return: ProcedureParamStorage\n ' value = self.paramsDict.get(index) if (value is not None): return value value = ProcedureParamStorage(index) self.paramsDict[index] = value return value
-1,781,732,992,971,712,000
Get the procedure params according to the index. Create the register when it does not exist. :param index: :return: ProcedureParamStorage
QCompute/QuantumPlatform/ProcedureParams.py
__getitem__
rickyHong/Qcompute-repl
python
def __getitem__(self, index): '\n Get the procedure params according to the index.\n\n Create the register when it does not exist.\n\n :param index:\n :return: ProcedureParamStorage\n ' value = self.paramsDict.get(index) if (value is not None): return value value = ProcedureParamStorage(index) self.paramsDict[index] = value return value
def __init__(self, index): '\n The quantum param object needs to know its index.\n :param index: the quantum register index\n ' self.index = index
-2,650,615,695,593,564,000
The quantum param object needs to know its index. :param index: the quantum register index
QCompute/QuantumPlatform/ProcedureParams.py
__init__
rickyHong/Qcompute-repl
python
def __init__(self, index): '\n The quantum param object needs to know its index.\n :param index: the quantum register index\n ' self.index = index
def wait_futures(future_list): '\n Wait for all of a list of futures to finish.\n Works with FutureWrapper.\n ' ret = [] futs = [] for f in future_list: if (f is None): continue if ((not isinstance(f, ndb.Future)) and (not isinstance(f, FutureWrapper))): ret.append(FutureWrapper(f)) else: futs.append(f) ndb.Future.wait_all(futs) return (futs + ret)
-616,037,939,454,443,500
Wait for all of a list of futures to finish. Works with FutureWrapper.
ms/storage/backends/google_appengine.py
wait_futures
jcnelson/syndicate
python
def wait_futures(future_list): '\n Wait for all of a list of futures to finish.\n Works with FutureWrapper.\n ' ret = [] futs = [] for f in future_list: if (f is None): continue if ((not isinstance(f, ndb.Future)) and (not isinstance(f, FutureWrapper))): ret.append(FutureWrapper(f)) else: futs.append(f) ndb.Future.wait_all(futs) return (futs + ret)
@property def title(self): 'This property is used to give the title of the portlet in the\n "manage portlets" screen.\n ' return _('Categories')
31,747,719,407,640,710
This property is used to give the title of the portlet in the "manage portlets" screen.
src/wad.blog/wad/blog/portlets/categories.py
title
potzenheimer/buildout.wad
python
@property def title(self): 'This property is used to give the title of the portlet in the\n "manage portlets" screen.\n ' return _('Categories')
def get_id(xkcd_link: str) -> int: '\n Exctract comic id from xkcd link\n ' match = re.search('\\d+', xkcd_link) if match: return int(match.group()) else: return 0
-632,864,871,487,891,600
Exctract comic id from xkcd link
xkcd_feed/src/utils.py
get_id
lwittchen/twitter-bots
python
def get_id(xkcd_link: str) -> int: '\n \n ' match = re.search('\\d+', xkcd_link) if match: return int(match.group()) else: return 0
def get_xkcd_rss_entries(url: str): '\n Load latest XKCD RSS feed and extract latest entry\n ' feed = feedparser.parse(url) return feed.get('entries')
-6,917,596,288,648,909,000
Load latest XKCD RSS feed and extract latest entry
xkcd_feed/src/utils.py
get_xkcd_rss_entries
lwittchen/twitter-bots
python
def get_xkcd_rss_entries(url: str): '\n \n ' feed = feedparser.parse(url) return feed.get('entries')
def get_latest_rss_entry(entries: list): '\n Extract latest entry from XKCD RSS feed and\n parse the ID\n ' entry = entries[0] id_ = get_id(xkcd_link=entry.get('id')) return (id_, entry)
739,203,433,765,726,000
Extract latest entry from XKCD RSS feed and parse the ID
xkcd_feed/src/utils.py
get_latest_rss_entry
lwittchen/twitter-bots
python
def get_latest_rss_entry(entries: list): '\n Extract latest entry from XKCD RSS feed and\n parse the ID\n ' entry = entries[0] id_ = get_id(xkcd_link=entry.get('id')) return (id_, entry)
def downdload_comic(entry: dict, filename: str) -> None: '\n Download latest image and store it in\n current working directory\n ' match = re.search('src="(.*png)"', entry['summary']) if match: img_url = match.groups()[0] r = requests.get(img_url) r.raise_for_status() with open(filename, 'wb') as f: f.write(r.content) return None
197,688,600,736,025,340
Download latest image and store it in current working directory
xkcd_feed/src/utils.py
downdload_comic
lwittchen/twitter-bots
python
def downdload_comic(entry: dict, filename: str) -> None: '\n Download latest image and store it in\n current working directory\n ' match = re.search('src="(.*png)"', entry['summary']) if match: img_url = match.groups()[0] r = requests.get(img_url) r.raise_for_status() with open(filename, 'wb') as f: f.write(r.content) return None
def initialize_twitter_api(config: ConfigParser): '\n Do authentication and return read-to-use\n twitter api object\n ' twitter_config = config['twitter'] auth = tweepy.OAuthHandler(twitter_config.get('consumer_key'), twitter_config.get('consumer_secret')) auth.set_access_token(twitter_config.get('access_token'), twitter_config.get('access_secret')) api = tweepy.API(auth) return api
-5,137,897,398,968,907,000
Do authentication and return read-to-use twitter api object
xkcd_feed/src/utils.py
initialize_twitter_api
lwittchen/twitter-bots
python
def initialize_twitter_api(config: ConfigParser): '\n Do authentication and return read-to-use\n twitter api object\n ' twitter_config = config['twitter'] auth = tweepy.OAuthHandler(twitter_config.get('consumer_key'), twitter_config.get('consumer_secret')) auth.set_access_token(twitter_config.get('access_token'), twitter_config.get('access_secret')) api = tweepy.API(auth) return api
def send_twitter_post(entry: dict, api: tweepy.API, img_fname: str) -> None: '\n Post tweet on twitter\n ' match = re.search('title=(.*)/>', entry['summary']) if match: msg = match.groups()[0] msg += f''' {entry['link']}''' else: msg = '-- No Title --' api.update_with_media(status=msg, filename=img_fname) return None
917,891,198,929,523,700
Post tweet on twitter
xkcd_feed/src/utils.py
send_twitter_post
lwittchen/twitter-bots
python
def send_twitter_post(entry: dict, api: tweepy.API, img_fname: str) -> None: '\n \n ' match = re.search('title=(.*)/>', entry['summary']) if match: msg = match.groups()[0] msg += f' {entry['link']}' else: msg = '-- No Title --' api.update_with_media(status=msg, filename=img_fname) return None
def to_localstack_url(api_id: str, url: str): '\n Converts a API GW url to localstack\n ' return url.replace('4566', f'4566/restapis/{api_id}').replace('dev', 'dev/_user_request_')
-8,481,725,068,796,059,000
Converts a API GW url to localstack
test/api_gw_test.py
to_localstack_url
fredliporace/cbers-2-stac
python
def to_localstack_url(api_id: str, url: str): '\n \n ' return url.replace('4566', f'4566/restapis/{api_id}').replace('dev', 'dev/_user_request_')
def api_gw_lambda_integrate_deploy(api_client, api: dict, api_resource: dict, lambda_func: dict, http_method: str='GET') -> str: '\n Integrate lambda with api gw method and deploy api.\n Return the invokation URL\n ' lambda_integration_arn = f"arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/{lambda_func['FunctionArn']}/invocations" api_client.put_integration(restApiId=api['id'], resourceId=api_resource['id'], httpMethod=http_method, type='AWS', integrationHttpMethod='POST', uri=lambda_integration_arn) api_client.create_deployment(restApiId=api['id'], stageName='dev') return f"http://localhost:4566/restapis/{api['id']}/dev/_user_request_{api_resource['path']}"
2,615,488,019,641,436,700
Integrate lambda with api gw method and deploy api. Return the invokation URL
test/api_gw_test.py
api_gw_lambda_integrate_deploy
fredliporace/cbers-2-stac
python
def api_gw_lambda_integrate_deploy(api_client, api: dict, api_resource: dict, lambda_func: dict, http_method: str='GET') -> str: '\n Integrate lambda with api gw method and deploy api.\n Return the invokation URL\n ' lambda_integration_arn = f"arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/{lambda_func['FunctionArn']}/invocations" api_client.put_integration(restApiId=api['id'], resourceId=api_resource['id'], httpMethod=http_method, type='AWS', integrationHttpMethod='POST', uri=lambda_integration_arn) api_client.create_deployment(restApiId=api['id'], stageName='dev') return f"http://localhost:4566/restapis/{api['id']}/dev/_user_request_{api_resource['path']}"
@pytest.fixture def api_gw_method(request): 'api gw for testing' marker = request.node.get_closest_marker('api_gw_method_args') put_method_args = marker.args[0]['put_method_args'] put_method_response_args = marker.args[0]['put_method_response_args'] api = None def fin(): 'fixture finalizer' if api: api_client.delete_rest_api(restApiId=api['id']) request.addfinalizer(fin) api_client = boto3.client('apigateway', endpoint_url=ENDPOINT_URL) api = api_client.create_rest_api(name='testapi') root_resource_id = api_client.get_resources(restApiId=api['id'])['items'][0]['id'] api_resource = api_client.create_resource(restApiId=api['id'], parentId=root_resource_id, pathPart='test') api_client.put_method(restApiId=api['id'], resourceId=api_resource['id'], authorizationType='NONE', **put_method_args) api_client.put_method_response(restApiId=api['id'], resourceId=api_resource['id'], statusCode='200', **put_method_response_args) return (api_client, api, api_resource)
6,460,700,496,959,510,000
api gw for testing
test/api_gw_test.py
api_gw_method
fredliporace/cbers-2-stac
python
@pytest.fixture def api_gw_method(request): marker = request.node.get_closest_marker('api_gw_method_args') put_method_args = marker.args[0]['put_method_args'] put_method_response_args = marker.args[0]['put_method_response_args'] api = None def fin(): 'fixture finalizer' if api: api_client.delete_rest_api(restApiId=api['id']) request.addfinalizer(fin) api_client = boto3.client('apigateway', endpoint_url=ENDPOINT_URL) api = api_client.create_rest_api(name='testapi') root_resource_id = api_client.get_resources(restApiId=api['id'])['items'][0]['id'] api_resource = api_client.create_resource(restApiId=api['id'], parentId=root_resource_id, pathPart='test') api_client.put_method(restApiId=api['id'], resourceId=api_resource['id'], authorizationType='NONE', **put_method_args) api_client.put_method_response(restApiId=api['id'], resourceId=api_resource['id'], statusCode='200', **put_method_response_args) return (api_client, api, api_resource)
@pytest.mark.api_gw_method_args({'put_method_args': {'httpMethod': 'GET'}, 'put_method_response_args': {'httpMethod': 'GET'}}) @pytest.mark.lambda_function_args({'name': 'stac_endpoint', 'handler': 'code.handler', 'environment': {'CBERS_STAC_BUCKET': 'bucket'}, 'timeout': 30, 'layers': ({'output_dir': './test', 'layer_dir': './cbers2stac/layers/common', 'tag': 'common'},)}) def test_root(api_gw_method, lambda_function): '\n test_root_endpoint\n ' (api_client, api, api_resource) = api_gw_method (lambda_client, lambda_func) = lambda_function url = api_gw_lambda_integrate_deploy(api_client, api, api_resource, lambda_func) req = requests.get(url) assert (req.status_code == 200)
472,674,239,671,295,700
test_root_endpoint
test/api_gw_test.py
test_root
fredliporace/cbers-2-stac
python
@pytest.mark.api_gw_method_args({'put_method_args': {'httpMethod': 'GET'}, 'put_method_response_args': {'httpMethod': 'GET'}}) @pytest.mark.lambda_function_args({'name': 'stac_endpoint', 'handler': 'code.handler', 'environment': {'CBERS_STAC_BUCKET': 'bucket'}, 'timeout': 30, 'layers': ({'output_dir': './test', 'layer_dir': './cbers2stac/layers/common', 'tag': 'common'},)}) def test_root(api_gw_method, lambda_function): '\n \n ' (api_client, api, api_resource) = api_gw_method (lambda_client, lambda_func) = lambda_function url = api_gw_lambda_integrate_deploy(api_client, api, api_resource, lambda_func) req = requests.get(url) assert (req.status_code == 200)
@pytest.mark.api_gw_method_args({'put_method_args': {'httpMethod': 'GET'}, 'put_method_response_args': {'httpMethod': 'GET'}}) @pytest.mark.lambda_function_args({'name': 'elasticsearch', 'handler': 'es.stac_search_endpoint_handler', 'environment': {}, 'timeout': 30, 'layers': ({'output_dir': './test', 'layer_dir': './cbers2stac/layers/common', 'tag': 'common'},)}) def test_item_search_get(api_gw_method, lambda_function, es_client): '\n test_item_search_get\n ' (api_client, api, api_resource) = api_gw_method (lambda_client, lambda_func) = lambda_function lambda_client.update_function_configuration(FunctionName=lambda_func['FunctionName'], Environment={'Variables': {'ES_PORT': '4571', 'ES_SSL': 'NO'}}) populate_es_test_case_1(es_client) original_url = api_gw_lambda_integrate_deploy(api_client, api, api_resource, lambda_func) req = requests.get(original_url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 2) url = f'{original_url}?collections=CBERS4-MUX' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['collection'] == 'CBERS4-MUX') url = f'{original_url}?collections=CBERS4-MUX,CBERS4-AWFI' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 2) url = f'{original_url}' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) url = f'{original_url}?limit=1' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' in fcol.keys()) assert (len(fcol['links']) == 1) next_href = to_localstack_url(api['id'], fcol['links'][0]['href']) req = requests.get(next_href) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) assert (fcol['features'][0]['id'] == 
'CBERS_4_MUX_20170528_090_084_L2') url = f'{original_url}?ids=CBERS_4_MUX_20170528_090_084_L2' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['id'] == 'CBERS_4_MUX_20170528_090_084_L2') url = f'{original_url}?' url += urlencode({'query': '{"cbers:data_type": {"eq":"L4"}}'}) req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['id'] == 'CBERS_4_AWFI_20170409_167_123_L4')
6,229,515,960,730,614,000
test_item_search_get
test/api_gw_test.py
test_item_search_get
fredliporace/cbers-2-stac
python
@pytest.mark.api_gw_method_args({'put_method_args': {'httpMethod': 'GET'}, 'put_method_response_args': {'httpMethod': 'GET'}}) @pytest.mark.lambda_function_args({'name': 'elasticsearch', 'handler': 'es.stac_search_endpoint_handler', 'environment': {}, 'timeout': 30, 'layers': ({'output_dir': './test', 'layer_dir': './cbers2stac/layers/common', 'tag': 'common'},)}) def (api_gw_method, lambda_function, es_client): '\n \n ' (api_client, api, api_resource) = api_gw_method (lambda_client, lambda_func) = lambda_function lambda_client.update_function_configuration(FunctionName=lambda_func['FunctionName'], Environment={'Variables': {'ES_PORT': '4571', 'ES_SSL': 'NO'}}) populate_es_test_case_1(es_client) original_url = api_gw_lambda_integrate_deploy(api_client, api, api_resource, lambda_func) req = requests.get(original_url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 2) url = f'{original_url}?collections=CBERS4-MUX' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['collection'] == 'CBERS4-MUX') url = f'{original_url}?collections=CBERS4-MUX,CBERS4-AWFI' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 2) url = f'{original_url}' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) url = f'{original_url}?limit=1' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' in fcol.keys()) assert (len(fcol['links']) == 1) next_href = to_localstack_url(api['id'], fcol['links'][0]['href']) req = requests.get(next_href) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) assert (fcol['features'][0]['id'] == 'CBERS_4_MUX_20170528_090_084_L2') url = 
f'{original_url}?ids=CBERS_4_MUX_20170528_090_084_L2' req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['id'] == 'CBERS_4_MUX_20170528_090_084_L2') url = f'{original_url}?' url += urlencode({'query': '{"cbers:data_type": {"eq":"L4"}}'}) req = requests.get(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['id'] == 'CBERS_4_AWFI_20170409_167_123_L4')
@pytest.mark.api_gw_method_args({'put_method_args': {'httpMethod': 'POST'}, 'put_method_response_args': {'httpMethod': 'POST'}}) @pytest.mark.lambda_function_args({'name': 'elasticsearch', 'handler': 'es.stac_search_endpoint_handler', 'environment': {}, 'timeout': 30, 'layers': ({'output_dir': './test', 'layer_dir': './cbers2stac/layers/common', 'tag': 'common'},)}) def test_item_search_post(api_gw_method, lambda_function, es_client): '\n test_item_search_post\n ' (api_client, api, api_resource) = api_gw_method (lambda_client, lambda_func) = lambda_function lambda_client.update_function_configuration(FunctionName=lambda_func['FunctionName'], Environment={'Variables': {'ES_PORT': '4571', 'ES_SSL': 'NO'}}) populate_es_test_case_1(es_client) url = api_gw_lambda_integrate_deploy(api_client, api, api_resource, lambda_func, http_method='POST') req = requests.post(url, data=json.dumps({'collections': ['mycollection'], 'bbox': [160.6, (- 55.95), (- 170), (- 25.89)], 'limit': 100, 'datetime': '2019-01-01T00:00:00Z/2019-01-01T23:59:59Z'})) assert (req.status_code == 400), req.text assert ('First lon corner is not western' in req.text) req = requests.post(url, data=json.dumps({'collections': ['mycollection'], 'bbox': [(- 170), (- 25.89), 160.6, (- 55.95)], 'limit': 100, 'datetime': '2019-01-01T00:00:00Z/2019-01-01T23:59:59Z'})) assert (req.status_code == 200), req.text req = requests.post(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) body = {'limit': 1} req = requests.post(url, data=json.dumps(body)) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' in fcol.keys()) assert (len(fcol['links']) == 1) next_href = to_localstack_url(api['id'], fcol['links'][0]['href']) req = requests.post(next_href, data=json.dumps({**body, **fcol['links'][0]['body']})) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) assert 
(fcol['features'][0]['id'] == 'CBERS_4_MUX_20170528_090_084_L2') body = {'ids': ['CBERS_4_MUX_20170528_090_084_L2']} req = requests.post(url, data=json.dumps(body)) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['id'] == 'CBERS_4_MUX_20170528_090_084_L2')
-2,357,401,077,611,262,500
test_item_search_post
test/api_gw_test.py
test_item_search_post
fredliporace/cbers-2-stac
python
@pytest.mark.api_gw_method_args({'put_method_args': {'httpMethod': 'POST'}, 'put_method_response_args': {'httpMethod': 'POST'}}) @pytest.mark.lambda_function_args({'name': 'elasticsearch', 'handler': 'es.stac_search_endpoint_handler', 'environment': {}, 'timeout': 30, 'layers': ({'output_dir': './test', 'layer_dir': './cbers2stac/layers/common', 'tag': 'common'},)}) def (api_gw_method, lambda_function, es_client): '\n \n ' (api_client, api, api_resource) = api_gw_method (lambda_client, lambda_func) = lambda_function lambda_client.update_function_configuration(FunctionName=lambda_func['FunctionName'], Environment={'Variables': {'ES_PORT': '4571', 'ES_SSL': 'NO'}}) populate_es_test_case_1(es_client) url = api_gw_lambda_integrate_deploy(api_client, api, api_resource, lambda_func, http_method='POST') req = requests.post(url, data=json.dumps({'collections': ['mycollection'], 'bbox': [160.6, (- 55.95), (- 170), (- 25.89)], 'limit': 100, 'datetime': '2019-01-01T00:00:00Z/2019-01-01T23:59:59Z'})) assert (req.status_code == 400), req.text assert ('First lon corner is not western' in req.text) req = requests.post(url, data=json.dumps({'collections': ['mycollection'], 'bbox': [(- 170), (- 25.89), 160.6, (- 55.95)], 'limit': 100, 'datetime': '2019-01-01T00:00:00Z/2019-01-01T23:59:59Z'})) assert (req.status_code == 200), req.text req = requests.post(url) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) body = {'limit': 1} req = requests.post(url, data=json.dumps(body)) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' in fcol.keys()) assert (len(fcol['links']) == 1) next_href = to_localstack_url(api['id'], fcol['links'][0]['href']) req = requests.post(next_href, data=json.dumps({**body, **fcol['links'][0]['body']})) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert ('links' not in fcol.keys()) assert (fcol['features'][0]['id'] == 
'CBERS_4_MUX_20170528_090_084_L2') body = {'ids': ['CBERS_4_MUX_20170528_090_084_L2']} req = requests.post(url, data=json.dumps(body)) assert (req.status_code == 200), req.text fcol = json.loads(req.text) assert (len(fcol['features']) == 1) assert (fcol['features'][0]['id'] == 'CBERS_4_MUX_20170528_090_084_L2')
def fin(): 'fixture finalizer' if api: api_client.delete_rest_api(restApiId=api['id'])
-8,156,885,776,005,364,000
fixture finalizer
test/api_gw_test.py
fin
fredliporace/cbers-2-stac
python
def fin(): if api: api_client.delete_rest_api(restApiId=api['id'])
def __init__(self, autoencoder_dims, n_clusters, update_interval=50, max_iteration=10000.0, clustering_tol=0.0001, alpha=1.0): 'Construtor of DCE. ' self.autoencoder_dims = autoencoder_dims self.n_clusters = n_clusters self.alpha = alpha self.update_interval = update_interval self.max_iteration = max_iteration self.clustering_tol = clustering_tol self.model = None return
4,826,052,091,919,239,000
Construtor of DCE.
deepchembed/dce.py
__init__
chembed/DeepChEmbed
python
def __init__(self, autoencoder_dims, n_clusters, update_interval=50, max_iteration=10000.0, clustering_tol=0.0001, alpha=1.0): ' ' self.autoencoder_dims = autoencoder_dims self.n_clusters = n_clusters self.alpha = alpha self.update_interval = update_interval self.max_iteration = max_iteration self.clustering_tol = clustering_tol self.model = None return