code
stringlengths 75
104k
| code_sememe
stringlengths 47
309k
| token_type
stringlengths 215
214k
| code_dependency
stringlengths 75
155k
|
---|---|---|---|
def rename_file(self, fmfile, newname):
    """Rename file in transfer.
    :param fmfile: file data from filemail containing fileid
    :param newname: new file name
    :type fmfile: ``dict``
    :type newname: ``str`` or ``unicode``
    :rtype: ``bool``
    """
    # The payload below reads fmfile via dict.get(), so reject anything
    # that is not a dict up front with the library's base error type.
    if not isinstance(fmfile, dict):
        raise FMBaseError('fmfile must be a <dict>')
    # get_URL('file_rename') yields the HTTP verb name and endpoint URL
    # for the rename action (exact semantics defined elsewhere — presumably
    # method is e.g. 'get'/'post'; confirm against get_URL).
    method, url = get_URL('file_rename')
    payload = {
        'apikey': self.config.get('apikey'),
        'logintoken': self.session.cookies.get('logintoken'),
        'fileid': fmfile.get('fileid'),
        'filename': newname
        }
    # Dispatch the request via the session attribute named by `method`.
    res = getattr(self.session, method)(url, params=payload)
    if res.status_code == 200:
        # Mark the transfer as complete and report success.
        self._complete = True
        return True
hellraiser(res) | def function[rename_file, parameter[self, fmfile, newname]]:
constant[Rename file in transfer.
:param fmfile: file data from filemail containing fileid
:param newname: new file name
:type fmfile: ``dict``
:type newname: ``str`` or ``unicode``
:rtype: ``bool``
]
if <ast.UnaryOp object at 0x7da20c6aa650> begin[:]
<ast.Raise object at 0x7da20c6aa6e0>
<ast.Tuple object at 0x7da20c6aa260> assign[=] call[name[get_URL], parameter[constant[file_rename]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da20c6ab1f0>, <ast.Constant object at 0x7da20c6a9720>, <ast.Constant object at 0x7da20c6a9060>, <ast.Constant object at 0x7da20c6a8e50>], [<ast.Call object at 0x7da20c6aab00>, <ast.Call object at 0x7da20c6a8a00>, <ast.Call object at 0x7da20c6aa3b0>, <ast.Name object at 0x7da20c6aa200>]]
variable[res] assign[=] call[call[name[getattr], parameter[name[self].session, name[method]]], parameter[name[url]]]
if compare[name[res].status_code equal[==] constant[200]] begin[:]
name[self]._complete assign[=] constant[True]
return[constant[True]]
call[name[hellraiser], parameter[name[res]]] | keyword[def] identifier[rename_file] ( identifier[self] , identifier[fmfile] , identifier[newname] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[fmfile] , identifier[dict] ):
keyword[raise] identifier[FMBaseError] ( literal[string] )
identifier[method] , identifier[url] = identifier[get_URL] ( literal[string] )
identifier[payload] ={
literal[string] : identifier[self] . identifier[config] . identifier[get] ( literal[string] ),
literal[string] : identifier[self] . identifier[session] . identifier[cookies] . identifier[get] ( literal[string] ),
literal[string] : identifier[fmfile] . identifier[get] ( literal[string] ),
literal[string] : identifier[newname]
}
identifier[res] = identifier[getattr] ( identifier[self] . identifier[session] , identifier[method] )( identifier[url] , identifier[params] = identifier[payload] )
keyword[if] identifier[res] . identifier[status_code] == literal[int] :
identifier[self] . identifier[_complete] = keyword[True]
keyword[return] keyword[True]
identifier[hellraiser] ( identifier[res] ) | def rename_file(self, fmfile, newname):
"""Rename file in transfer.
:param fmfile: file data from filemail containing fileid
:param newname: new file name
:type fmfile: ``dict``
:type newname: ``str`` or ``unicode``
:rtype: ``bool``
"""
if not isinstance(fmfile, dict):
raise FMBaseError('fmfile must be a <dict>') # depends on [control=['if'], data=[]]
(method, url) = get_URL('file_rename')
payload = {'apikey': self.config.get('apikey'), 'logintoken': self.session.cookies.get('logintoken'), 'fileid': fmfile.get('fileid'), 'filename': newname}
res = getattr(self.session, method)(url, params=payload)
if res.status_code == 200:
self._complete = True
return True # depends on [control=['if'], data=[]]
hellraiser(res) |
def equation_of_time_pvcdrom(dayofyear):
    """
    Equation of time from PVCDROM.
    `PVCDROM`_ is a website by Solar Power Lab at Arizona State
    University (ASU)
    .. _PVCDROM: http://www.pveducation.org/pvcdrom/2-properties-sunlight/solar-time
    Parameters
    ----------
    dayofyear : numeric
    Returns
    -------
    equation_of_time : numeric
        Difference in time between solar time and mean solar time in minutes.
    References
    ----------
    [1] Soteris A. Kalogirou, "Solar Energy Engineering Processes and
    Systems, 2nd Edition" Elselvier/Academic Press (2009).
    See Also
    --------
    equation_of_time_Spencer71
    """
    # day angle relative to Vernal Equinox, typically March 22 (day number 81)
    # (2*pi/365) converts days to radians; the 80-day offset shifts the
    # reference point from Jan 1 to the equinox.
    bday = \
        _calculate_simple_day_angle(dayofyear) - (2.0 * np.pi / 365.0) * 80.0
    # same value but about 2x faster than Spencer (1971)
return 9.87 * np.sin(2.0 * bday) - 7.53 * np.cos(bday) - 1.5 * np.sin(bday) | def function[equation_of_time_pvcdrom, parameter[dayofyear]]:
constant[
Equation of time from PVCDROM.
`PVCDROM`_ is a website by Solar Power Lab at Arizona State
University (ASU)
.. _PVCDROM: http://www.pveducation.org/pvcdrom/2-properties-sunlight/solar-time
Parameters
----------
dayofyear : numeric
Returns
-------
equation_of_time : numeric
Difference in time between solar time and mean solar time in minutes.
References
----------
[1] Soteris A. Kalogirou, "Solar Energy Engineering Processes and
Systems, 2nd Edition" Elselvier/Academic Press (2009).
See Also
--------
equation_of_time_Spencer71
]
variable[bday] assign[=] binary_operation[call[name[_calculate_simple_day_angle], parameter[name[dayofyear]]] - binary_operation[binary_operation[binary_operation[constant[2.0] * name[np].pi] / constant[365.0]] * constant[80.0]]]
return[binary_operation[binary_operation[binary_operation[constant[9.87] * call[name[np].sin, parameter[binary_operation[constant[2.0] * name[bday]]]]] - binary_operation[constant[7.53] * call[name[np].cos, parameter[name[bday]]]]] - binary_operation[constant[1.5] * call[name[np].sin, parameter[name[bday]]]]]] | keyword[def] identifier[equation_of_time_pvcdrom] ( identifier[dayofyear] ):
literal[string]
identifier[bday] = identifier[_calculate_simple_day_angle] ( identifier[dayofyear] )-( literal[int] * identifier[np] . identifier[pi] / literal[int] )* literal[int]
keyword[return] literal[int] * identifier[np] . identifier[sin] ( literal[int] * identifier[bday] )- literal[int] * identifier[np] . identifier[cos] ( identifier[bday] )- literal[int] * identifier[np] . identifier[sin] ( identifier[bday] ) | def equation_of_time_pvcdrom(dayofyear):
"""
Equation of time from PVCDROM.
`PVCDROM`_ is a website by Solar Power Lab at Arizona State
University (ASU)
.. _PVCDROM: http://www.pveducation.org/pvcdrom/2-properties-sunlight/solar-time
Parameters
----------
dayofyear : numeric
Returns
-------
equation_of_time : numeric
Difference in time between solar time and mean solar time in minutes.
References
----------
[1] Soteris A. Kalogirou, "Solar Energy Engineering Processes and
Systems, 2nd Edition" Elselvier/Academic Press (2009).
See Also
--------
equation_of_time_Spencer71
"""
# day angle relative to Vernal Equinox, typically March 22 (day number 81)
bday = _calculate_simple_day_angle(dayofyear) - 2.0 * np.pi / 365.0 * 80.0
# same value but about 2x faster than Spencer (1971)
return 9.87 * np.sin(2.0 * bday) - 7.53 * np.cos(bday) - 1.5 * np.sin(bday) |
def is_valid_mimetype(response):
    """ Return ``True`` if the mimetype is not blacklisted.
    :rtype: bool
    """
    # Mimetype fragments that disqualify a response; a substring match
    # against any entry rejects it.
    blacklist = [
        'image/',
    ]
    mimetype = response.get('mimeType')
    if not mimetype:
        # Missing or empty mimetype: treated as valid rather than rejected.
        return True
    for bw in blacklist:
        if bw in mimetype:
            return False
return True | def function[is_valid_mimetype, parameter[response]]:
constant[ Return ``True`` if the mimetype is not blacklisted.
:rtype: bool
]
variable[blacklist] assign[=] list[[<ast.Constant object at 0x7da1b135b520>]]
variable[mimetype] assign[=] call[name[response].get, parameter[constant[mimeType]]]
if <ast.UnaryOp object at 0x7da1b1359240> begin[:]
return[constant[True]]
for taget[name[bw]] in starred[name[blacklist]] begin[:]
if compare[name[bw] in name[mimetype]] begin[:]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[is_valid_mimetype] ( identifier[response] ):
literal[string]
identifier[blacklist] =[
literal[string] ,
]
identifier[mimetype] = identifier[response] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[mimetype] :
keyword[return] keyword[True]
keyword[for] identifier[bw] keyword[in] identifier[blacklist] :
keyword[if] identifier[bw] keyword[in] identifier[mimetype] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def is_valid_mimetype(response):
""" Return ``True`` if the mimetype is not blacklisted.
:rtype: bool
"""
blacklist = ['image/']
mimetype = response.get('mimeType')
if not mimetype:
return True # depends on [control=['if'], data=[]]
for bw in blacklist:
if bw in mimetype:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bw']]
return True |
def populate_device(dev):
    """! @brief Generates and populates the target defined by a CmsisPackDevice.
    The new target class is added to the `#TARGET` list.
    @param dev A CmsisPackDevice object.
    """
    try:
        tgt = PackTargets._generate_pack_target_class(dev)
        if tgt is None:
            # Class generation declined this device; nothing to register.
            return
        # Target names are keyed by lowercased part number.
        part = dev.part_number.lower()
        LOG.debug("Loading target '%s' from CMSIS-Pack", part)
        # Make sure there isn't a duplicate target name.
        if part not in TARGET:
            TARGET[part] = tgt
    # Pack parsing problems are logged as warnings rather than propagated,
    # so one bad pack does not abort population of the others.
    except (MalformedCmsisPackError, FileNotFoundError_) as err:
LOG.warning(err) | def function[populate_device, parameter[dev]]:
constant[! @brief Generates and populates the target defined by a CmsisPackDevice.
The new target class is added to the `#TARGET` list.
@param dev A CmsisPackDevice object.
]
<ast.Try object at 0x7da1b18ee5f0> | keyword[def] identifier[populate_device] ( identifier[dev] ):
literal[string]
keyword[try] :
identifier[tgt] = identifier[PackTargets] . identifier[_generate_pack_target_class] ( identifier[dev] )
keyword[if] identifier[tgt] keyword[is] keyword[None] :
keyword[return]
identifier[part] = identifier[dev] . identifier[part_number] . identifier[lower] ()
identifier[LOG] . identifier[debug] ( literal[string] , identifier[part] )
keyword[if] identifier[part] keyword[not] keyword[in] identifier[TARGET] :
identifier[TARGET] [ identifier[part] ]= identifier[tgt]
keyword[except] ( identifier[MalformedCmsisPackError] , identifier[FileNotFoundError_] ) keyword[as] identifier[err] :
identifier[LOG] . identifier[warning] ( identifier[err] ) | def populate_device(dev):
"""! @brief Generates and populates the target defined by a CmsisPackDevice.
The new target class is added to the `#TARGET` list.
@param dev A CmsisPackDevice object.
"""
try:
tgt = PackTargets._generate_pack_target_class(dev)
if tgt is None:
return # depends on [control=['if'], data=[]]
part = dev.part_number.lower()
LOG.debug("Loading target '%s' from CMSIS-Pack", part)
# Make sure there isn't a duplicate target name.
if part not in TARGET:
TARGET[part] = tgt # depends on [control=['if'], data=['part', 'TARGET']] # depends on [control=['try'], data=[]]
except (MalformedCmsisPackError, FileNotFoundError_) as err:
LOG.warning(err) # depends on [control=['except'], data=['err']] |
def create(self, to, from_, method=values.unset, fallback_url=values.unset,
           fallback_method=values.unset, status_callback=values.unset,
           status_callback_event=values.unset,
           status_callback_method=values.unset, send_digits=values.unset,
           timeout=values.unset, record=values.unset,
           recording_channels=values.unset,
           recording_status_callback=values.unset,
           recording_status_callback_method=values.unset,
           sip_auth_username=values.unset, sip_auth_password=values.unset,
           machine_detection=values.unset,
           machine_detection_timeout=values.unset,
           recording_status_callback_event=values.unset, trim=values.unset,
           caller_id=values.unset,
           machine_detection_speech_threshold=values.unset,
           machine_detection_speech_end_threshold=values.unset,
           machine_detection_silence_timeout=values.unset, url=values.unset,
           application_sid=values.unset):
    """
    Create a new CallInstance
    :param unicode to: Phone number, SIP address, or client identifier to call
    :param unicode from_: Twilio number from which to originate the call
    :param unicode method: HTTP method to use to fetch TwiML
    :param unicode fallback_url: Fallback URL in case of error
    :param unicode fallback_method: HTTP Method to use with fallback_url
    :param unicode status_callback: The URL we should call to send status information to your application
    :param unicode status_callback_event: The call progress events that we send to the `status_callback` URL.
    :param unicode status_callback_method: HTTP Method to use with status_callback
    :param unicode send_digits: The digits to dial after connecting to the number
    :param unicode timeout: Number of seconds to wait for an answer
    :param bool record: Whether or not to record the call
    :param unicode recording_channels: The number of channels in the final recording
    :param unicode recording_status_callback: The URL that we call when the recording is available to be accessed
    :param unicode recording_status_callback_method: The HTTP method we should use when calling the `recording_status_callback` URL
    :param unicode sip_auth_username: The username used to authenticate the caller making a SIP call
    :param unicode sip_auth_password: The password required to authenticate the user account specified in `sip_auth_username`.
    :param unicode machine_detection: Enable machine detection or end of greeting detection
    :param unicode machine_detection_timeout: Number of seconds to wait for machine detection
    :param unicode recording_status_callback_event: The recording status events that will trigger calls to the URL specified in `recording_status_callback`
    :param unicode trim: Set this parameter to control trimming of silence on the recording.
    :param unicode caller_id: The phone number, SIP address, or Client identifier that made this call. Phone numbers are in E.164 format (e.g., +16175551212). SIP addresses are formatted as `[email protected]`.
    :param unicode machine_detection_speech_threshold: Number of milliseconds for measuring stick for the length of the speech activity
    :param unicode machine_detection_speech_end_threshold: Number of milliseconds of silence after speech activity
    :param unicode machine_detection_silence_timeout: Number of milliseconds of initial silence
    :param unicode url: The absolute URL that returns TwiML for this call
    :param unicode application_sid: The SID of the Application resource that will handle the call
    :returns: Newly created CallInstance
    :rtype: twilio.rest.api.v2010.account.call.CallInstance
    """
    # Map keyword arguments onto the API's expected form-parameter names.
    # values.of() presumably filters out entries still set to values.unset
    # so only explicitly supplied options are sent — confirm against the
    # `values` helper module.
    data = values.of({
        'To': to,
        'From': from_,
        'Url': url,
        'ApplicationSid': application_sid,
        'Method': method,
        'FallbackUrl': fallback_url,
        'FallbackMethod': fallback_method,
        'StatusCallback': status_callback,
        # List-valued parameters are serialized element-by-element.
        'StatusCallbackEvent': serialize.map(status_callback_event, lambda e: e),
        'StatusCallbackMethod': status_callback_method,
        'SendDigits': send_digits,
        'Timeout': timeout,
        'Record': record,
        'RecordingChannels': recording_channels,
        'RecordingStatusCallback': recording_status_callback,
        'RecordingStatusCallbackMethod': recording_status_callback_method,
        'SipAuthUsername': sip_auth_username,
        'SipAuthPassword': sip_auth_password,
        'MachineDetection': machine_detection,
        'MachineDetectionTimeout': machine_detection_timeout,
        'RecordingStatusCallbackEvent': serialize.map(recording_status_callback_event, lambda e: e),
        'Trim': trim,
        'CallerId': caller_id,
        'MachineDetectionSpeechThreshold': machine_detection_speech_threshold,
        'MachineDetectionSpeechEndThreshold': machine_detection_speech_end_threshold,
        'MachineDetectionSilenceTimeout': machine_detection_silence_timeout,
    })
    # Issue the POST against this list resource's URI.
    payload = self._version.create(
        'POST',
        self._uri,
        data=data,
    )
return CallInstance(self._version, payload, account_sid=self._solution['account_sid'], ) | def function[create, parameter[self, to, from_, method, fallback_url, fallback_method, status_callback, status_callback_event, status_callback_method, send_digits, timeout, record, recording_channels, recording_status_callback, recording_status_callback_method, sip_auth_username, sip_auth_password, machine_detection, machine_detection_timeout, recording_status_callback_event, trim, caller_id, machine_detection_speech_threshold, machine_detection_speech_end_threshold, machine_detection_silence_timeout, url, application_sid]]:
constant[
Create a new CallInstance
:param unicode to: Phone number, SIP address, or client identifier to call
:param unicode from_: Twilio number from which to originate the call
:param unicode method: HTTP method to use to fetch TwiML
:param unicode fallback_url: Fallback URL in case of error
:param unicode fallback_method: HTTP Method to use with fallback_url
:param unicode status_callback: The URL we should call to send status information to your application
:param unicode status_callback_event: The call progress events that we send to the `status_callback` URL.
:param unicode status_callback_method: HTTP Method to use with status_callback
:param unicode send_digits: The digits to dial after connecting to the number
:param unicode timeout: Number of seconds to wait for an answer
:param bool record: Whether or not to record the call
:param unicode recording_channels: The number of channels in the final recording
:param unicode recording_status_callback: The URL that we call when the recording is available to be accessed
:param unicode recording_status_callback_method: The HTTP method we should use when calling the `recording_status_callback` URL
:param unicode sip_auth_username: The username used to authenticate the caller making a SIP call
:param unicode sip_auth_password: The password required to authenticate the user account specified in `sip_auth_username`.
:param unicode machine_detection: Enable machine detection or end of greeting detection
:param unicode machine_detection_timeout: Number of seconds to wait for machine detection
:param unicode recording_status_callback_event: The recording status events that will trigger calls to the URL specified in `recording_status_callback`
:param unicode trim: Set this parameter to control trimming of silence on the recording.
:param unicode caller_id: The phone number, SIP address, or Client identifier that made this call. Phone numbers are in E.164 format (e.g., +16175551212). SIP addresses are formatted as `[email protected]`.
:param unicode machine_detection_speech_threshold: Number of milliseconds for measuring stick for the length of the speech activity
:param unicode machine_detection_speech_end_threshold: Number of milliseconds of silence after speech activity
:param unicode machine_detection_silence_timeout: Number of milliseconds of initial silence
:param unicode url: The absolute URL that returns TwiML for this call
:param unicode application_sid: The SID of the Application resource that will handle the call
:returns: Newly created CallInstance
:rtype: twilio.rest.api.v2010.account.call.CallInstance
]
variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da20c6aa4a0>, <ast.Constant object at 0x7da20c6a8520>, <ast.Constant object at 0x7da1b1eac0a0>, <ast.Constant object at 0x7da1b1eac550>, <ast.Constant object at 0x7da1b1eafa60>, <ast.Constant object at 0x7da1b1eaee00>, <ast.Constant object at 0x7da1b1eaf970>, <ast.Constant object at 0x7da1b1eac1c0>, <ast.Constant object at 0x7da1b1eacca0>, <ast.Constant object at 0x7da1b1eaf1c0>, <ast.Constant object at 0x7da1b1ead8a0>, <ast.Constant object at 0x7da1b1eaffd0>, <ast.Constant object at 0x7da1b1eaf220>, <ast.Constant object at 0x7da1b1eaca30>, <ast.Constant object at 0x7da1b1eae800>, <ast.Constant object at 0x7da1b1eaf040>, <ast.Constant object at 0x7da1b1eacd60>, <ast.Constant object at 0x7da1b1eafc40>, <ast.Constant object at 0x7da1b1ead690>, <ast.Constant object at 0x7da1b1eaf3d0>, <ast.Constant object at 0x7da1b1eacd00>, <ast.Constant object at 0x7da1b1eac430>, <ast.Constant object at 0x7da1b1eac2e0>, <ast.Constant object at 0x7da1b1eac580>, <ast.Constant object at 0x7da1b1ead180>, <ast.Constant object at 0x7da1b1ead570>], [<ast.Name object at 0x7da1b1eaecb0>, <ast.Name object at 0x7da1b1eaff10>, <ast.Name object at 0x7da1b1eae170>, <ast.Name object at 0x7da1b1ead7b0>, <ast.Name object at 0x7da1b1eaed70>, <ast.Name object at 0x7da1b1eadae0>, <ast.Name object at 0x7da1b1ead2a0>, <ast.Name object at 0x7da1b1eafac0>, <ast.Call object at 0x7da1b1eaea10>, <ast.Name object at 0x7da1b1eaea40>, <ast.Name object at 0x7da1b1ead2d0>, <ast.Name object at 0x7da1b1eaf460>, <ast.Name object at 0x7da1b1eaf5e0>, <ast.Name object at 0x7da1b1eaf1f0>, <ast.Name object at 0x7da1b1eaf280>, <ast.Name object at 0x7da1b1ead780>, <ast.Name object at 0x7da1b1eae9b0>, <ast.Name object at 0x7da1b1eae3e0>, <ast.Name object at 0x7da1b1eadf30>, <ast.Name object at 0x7da1b1ead9c0>, <ast.Call object at 0x7da1b1ead540>, <ast.Name object at 0x7da1b1eaf910>, <ast.Name object at 0x7da1b1eac340>, <ast.Name 
object at 0x7da1b1ead900>, <ast.Name object at 0x7da1b1eacb20>, <ast.Name object at 0x7da1b1eaf340>]]]]
variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]]
return[call[name[CallInstance], parameter[name[self]._version, name[payload]]]] | keyword[def] identifier[create] ( identifier[self] , identifier[to] , identifier[from_] , identifier[method] = identifier[values] . identifier[unset] , identifier[fallback_url] = identifier[values] . identifier[unset] ,
identifier[fallback_method] = identifier[values] . identifier[unset] , identifier[status_callback] = identifier[values] . identifier[unset] ,
identifier[status_callback_event] = identifier[values] . identifier[unset] ,
identifier[status_callback_method] = identifier[values] . identifier[unset] , identifier[send_digits] = identifier[values] . identifier[unset] ,
identifier[timeout] = identifier[values] . identifier[unset] , identifier[record] = identifier[values] . identifier[unset] ,
identifier[recording_channels] = identifier[values] . identifier[unset] ,
identifier[recording_status_callback] = identifier[values] . identifier[unset] ,
identifier[recording_status_callback_method] = identifier[values] . identifier[unset] ,
identifier[sip_auth_username] = identifier[values] . identifier[unset] , identifier[sip_auth_password] = identifier[values] . identifier[unset] ,
identifier[machine_detection] = identifier[values] . identifier[unset] ,
identifier[machine_detection_timeout] = identifier[values] . identifier[unset] ,
identifier[recording_status_callback_event] = identifier[values] . identifier[unset] , identifier[trim] = identifier[values] . identifier[unset] ,
identifier[caller_id] = identifier[values] . identifier[unset] ,
identifier[machine_detection_speech_threshold] = identifier[values] . identifier[unset] ,
identifier[machine_detection_speech_end_threshold] = identifier[values] . identifier[unset] ,
identifier[machine_detection_silence_timeout] = identifier[values] . identifier[unset] , identifier[url] = identifier[values] . identifier[unset] ,
identifier[application_sid] = identifier[values] . identifier[unset] ):
literal[string]
identifier[data] = identifier[values] . identifier[of] ({
literal[string] : identifier[to] ,
literal[string] : identifier[from_] ,
literal[string] : identifier[url] ,
literal[string] : identifier[application_sid] ,
literal[string] : identifier[method] ,
literal[string] : identifier[fallback_url] ,
literal[string] : identifier[fallback_method] ,
literal[string] : identifier[status_callback] ,
literal[string] : identifier[serialize] . identifier[map] ( identifier[status_callback_event] , keyword[lambda] identifier[e] : identifier[e] ),
literal[string] : identifier[status_callback_method] ,
literal[string] : identifier[send_digits] ,
literal[string] : identifier[timeout] ,
literal[string] : identifier[record] ,
literal[string] : identifier[recording_channels] ,
literal[string] : identifier[recording_status_callback] ,
literal[string] : identifier[recording_status_callback_method] ,
literal[string] : identifier[sip_auth_username] ,
literal[string] : identifier[sip_auth_password] ,
literal[string] : identifier[machine_detection] ,
literal[string] : identifier[machine_detection_timeout] ,
literal[string] : identifier[serialize] . identifier[map] ( identifier[recording_status_callback_event] , keyword[lambda] identifier[e] : identifier[e] ),
literal[string] : identifier[trim] ,
literal[string] : identifier[caller_id] ,
literal[string] : identifier[machine_detection_speech_threshold] ,
literal[string] : identifier[machine_detection_speech_end_threshold] ,
literal[string] : identifier[machine_detection_silence_timeout] ,
})
identifier[payload] = identifier[self] . identifier[_version] . identifier[create] (
literal[string] ,
identifier[self] . identifier[_uri] ,
identifier[data] = identifier[data] ,
)
keyword[return] identifier[CallInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],) | def create(self, to, from_, method=values.unset, fallback_url=values.unset, fallback_method=values.unset, status_callback=values.unset, status_callback_event=values.unset, status_callback_method=values.unset, send_digits=values.unset, timeout=values.unset, record=values.unset, recording_channels=values.unset, recording_status_callback=values.unset, recording_status_callback_method=values.unset, sip_auth_username=values.unset, sip_auth_password=values.unset, machine_detection=values.unset, machine_detection_timeout=values.unset, recording_status_callback_event=values.unset, trim=values.unset, caller_id=values.unset, machine_detection_speech_threshold=values.unset, machine_detection_speech_end_threshold=values.unset, machine_detection_silence_timeout=values.unset, url=values.unset, application_sid=values.unset):
"""
Create a new CallInstance
:param unicode to: Phone number, SIP address, or client identifier to call
:param unicode from_: Twilio number from which to originate the call
:param unicode method: HTTP method to use to fetch TwiML
:param unicode fallback_url: Fallback URL in case of error
:param unicode fallback_method: HTTP Method to use with fallback_url
:param unicode status_callback: The URL we should call to send status information to your application
:param unicode status_callback_event: The call progress events that we send to the `status_callback` URL.
:param unicode status_callback_method: HTTP Method to use with status_callback
:param unicode send_digits: The digits to dial after connecting to the number
:param unicode timeout: Number of seconds to wait for an answer
:param bool record: Whether or not to record the call
:param unicode recording_channels: The number of channels in the final recording
:param unicode recording_status_callback: The URL that we call when the recording is available to be accessed
:param unicode recording_status_callback_method: The HTTP method we should use when calling the `recording_status_callback` URL
:param unicode sip_auth_username: The username used to authenticate the caller making a SIP call
:param unicode sip_auth_password: The password required to authenticate the user account specified in `sip_auth_username`.
:param unicode machine_detection: Enable machine detection or end of greeting detection
:param unicode machine_detection_timeout: Number of seconds to wait for machine detection
:param unicode recording_status_callback_event: The recording status events that will trigger calls to the URL specified in `recording_status_callback`
:param unicode trim: Set this parameter to control trimming of silence on the recording.
:param unicode caller_id: The phone number, SIP address, or Client identifier that made this call. Phone numbers are in E.164 format (e.g., +16175551212). SIP addresses are formatted as `[email protected]`.
:param unicode machine_detection_speech_threshold: Number of milliseconds for measuring stick for the length of the speech activity
:param unicode machine_detection_speech_end_threshold: Number of milliseconds of silence after speech activity
:param unicode machine_detection_silence_timeout: Number of milliseconds of initial silence
:param unicode url: The absolute URL that returns TwiML for this call
:param unicode application_sid: The SID of the Application resource that will handle the call
:returns: Newly created CallInstance
:rtype: twilio.rest.api.v2010.account.call.CallInstance
"""
data = values.of({'To': to, 'From': from_, 'Url': url, 'ApplicationSid': application_sid, 'Method': method, 'FallbackUrl': fallback_url, 'FallbackMethod': fallback_method, 'StatusCallback': status_callback, 'StatusCallbackEvent': serialize.map(status_callback_event, lambda e: e), 'StatusCallbackMethod': status_callback_method, 'SendDigits': send_digits, 'Timeout': timeout, 'Record': record, 'RecordingChannels': recording_channels, 'RecordingStatusCallback': recording_status_callback, 'RecordingStatusCallbackMethod': recording_status_callback_method, 'SipAuthUsername': sip_auth_username, 'SipAuthPassword': sip_auth_password, 'MachineDetection': machine_detection, 'MachineDetectionTimeout': machine_detection_timeout, 'RecordingStatusCallbackEvent': serialize.map(recording_status_callback_event, lambda e: e), 'Trim': trim, 'CallerId': caller_id, 'MachineDetectionSpeechThreshold': machine_detection_speech_threshold, 'MachineDetectionSpeechEndThreshold': machine_detection_speech_end_threshold, 'MachineDetectionSilenceTimeout': machine_detection_silence_timeout})
payload = self._version.create('POST', self._uri, data=data)
return CallInstance(self._version, payload, account_sid=self._solution['account_sid']) |
def msvc14_get_vc_env(plat_spec):
    """
    Patched "distutils._msvccompiler._get_vc_env" for support extra
    compilers.
    Set environment without use of "vcvarsall.bat".
    Known supported compilers
    -------------------------
    Microsoft Visual C++ 14.0:
        Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
        Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
        Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
    Parameters
    ----------
    plat_spec: str
        Target architecture.
    Return
    ------
    environment: dict
    """
    # Try to get environment from vcvarsall.bat (Classical way)
    try:
        # get_unpatched resolves the original (unpatched) implementation
        # of this same function and delegates to it first.
        return get_unpatched(msvc14_get_vc_env)(plat_spec)
    except distutils.errors.DistutilsPlatformError:
        # Pass error Vcvarsall.bat is missing
        pass
    # If error, try to set environment directly
    try:
        return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()
    except distutils.errors.DistutilsPlatformError as exc:
        # Enrich the error message with VC++ 14.0 install hints before
        # re-raising for the caller.
        _augment_exception(exc, 14.0)
raise | def function[msvc14_get_vc_env, parameter[plat_spec]]:
constant[
Patched "distutils._msvccompiler._get_vc_env" for support extra
compilers.
Set environment without use of "vcvarsall.bat".
Known supported compilers
-------------------------
Microsoft Visual C++ 14.0:
Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
Parameters
----------
plat_spec: str
Target architecture.
Return
------
environment: dict
]
<ast.Try object at 0x7da1b1b851e0>
<ast.Try object at 0x7da1b1b86800> | keyword[def] identifier[msvc14_get_vc_env] ( identifier[plat_spec] ):
literal[string]
keyword[try] :
keyword[return] identifier[get_unpatched] ( identifier[msvc14_get_vc_env] )( identifier[plat_spec] )
keyword[except] identifier[distutils] . identifier[errors] . identifier[DistutilsPlatformError] :
keyword[pass]
keyword[try] :
keyword[return] identifier[EnvironmentInfo] ( identifier[plat_spec] , identifier[vc_min_ver] = literal[int] ). identifier[return_env] ()
keyword[except] identifier[distutils] . identifier[errors] . identifier[DistutilsPlatformError] keyword[as] identifier[exc] :
identifier[_augment_exception] ( identifier[exc] , literal[int] )
keyword[raise] | def msvc14_get_vc_env(plat_spec):
"""
Patched "distutils._msvccompiler._get_vc_env" for support extra
compilers.
Set environment without use of "vcvarsall.bat".
Known supported compilers
-------------------------
Microsoft Visual C++ 14.0:
Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
Microsoft Visual Studio 2017 (x86, x64, arm, arm64)
Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
Parameters
----------
plat_spec: str
Target architecture.
Return
------
environment: dict
"""
# Try to get environment from vcvarsall.bat (Classical way)
try:
return get_unpatched(msvc14_get_vc_env)(plat_spec) # depends on [control=['try'], data=[]]
except distutils.errors.DistutilsPlatformError:
# Pass error Vcvarsall.bat is missing
pass # depends on [control=['except'], data=[]]
# If error, try to set environment directly
try:
return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() # depends on [control=['try'], data=[]]
except distutils.errors.DistutilsPlatformError as exc:
_augment_exception(exc, 14.0)
raise # depends on [control=['except'], data=['exc']] |
def _unlockSim(self, pin):
""" Unlocks the SIM card using the specified PIN (if necessary, else does nothing) """
# Unlock the SIM card if needed
try:
cpinResponse = lineStartingWith('+CPIN', self.write('AT+CPIN?', timeout=0.25))
except TimeoutException as timeout:
# Wavecom modems do not end +CPIN responses with "OK" (github issue #19) - see if just the +CPIN response was returned
if timeout.data != None:
cpinResponse = lineStartingWith('+CPIN', timeout.data)
if cpinResponse == None:
# No useful response read
raise timeout
else:
# Nothing read (real timeout)
raise timeout
if cpinResponse != '+CPIN: READY':
if pin != None:
self.write('AT+CPIN="{0}"'.format(pin))
else:
raise PinRequiredError('AT+CPIN') | def function[_unlockSim, parameter[self, pin]]:
constant[ Unlocks the SIM card using the specified PIN (if necessary, else does nothing) ]
<ast.Try object at 0x7da18eb56440>
if compare[name[cpinResponse] not_equal[!=] constant[+CPIN: READY]] begin[:]
if compare[name[pin] not_equal[!=] constant[None]] begin[:]
call[name[self].write, parameter[call[constant[AT+CPIN="{0}"].format, parameter[name[pin]]]]] | keyword[def] identifier[_unlockSim] ( identifier[self] , identifier[pin] ):
literal[string]
keyword[try] :
identifier[cpinResponse] = identifier[lineStartingWith] ( literal[string] , identifier[self] . identifier[write] ( literal[string] , identifier[timeout] = literal[int] ))
keyword[except] identifier[TimeoutException] keyword[as] identifier[timeout] :
keyword[if] identifier[timeout] . identifier[data] != keyword[None] :
identifier[cpinResponse] = identifier[lineStartingWith] ( literal[string] , identifier[timeout] . identifier[data] )
keyword[if] identifier[cpinResponse] == keyword[None] :
keyword[raise] identifier[timeout]
keyword[else] :
keyword[raise] identifier[timeout]
keyword[if] identifier[cpinResponse] != literal[string] :
keyword[if] identifier[pin] != keyword[None] :
identifier[self] . identifier[write] ( literal[string] . identifier[format] ( identifier[pin] ))
keyword[else] :
keyword[raise] identifier[PinRequiredError] ( literal[string] ) | def _unlockSim(self, pin):
""" Unlocks the SIM card using the specified PIN (if necessary, else does nothing) """
# Unlock the SIM card if needed
try:
cpinResponse = lineStartingWith('+CPIN', self.write('AT+CPIN?', timeout=0.25)) # depends on [control=['try'], data=[]]
except TimeoutException as timeout:
# Wavecom modems do not end +CPIN responses with "OK" (github issue #19) - see if just the +CPIN response was returned
if timeout.data != None:
cpinResponse = lineStartingWith('+CPIN', timeout.data)
if cpinResponse == None:
# No useful response read
raise timeout # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# Nothing read (real timeout)
raise timeout # depends on [control=['except'], data=['timeout']]
if cpinResponse != '+CPIN: READY':
if pin != None:
self.write('AT+CPIN="{0}"'.format(pin)) # depends on [control=['if'], data=['pin']]
else:
raise PinRequiredError('AT+CPIN') # depends on [control=['if'], data=[]] |
def grant_client(self, client_id):
"""
Grant the given client id all the scopes and authorities
needed to work with the access control service.
"""
zone = self.service.settings.data['zone']['oauth-scope']
scopes = ['openid', zone,
'acs.policies.read', 'acs.attributes.read',
'acs.policies.write', 'acs.attributes.write']
authorities = ['uaa.resource', zone,
'acs.policies.read', 'acs.policies.write',
'acs.attributes.read', 'acs.attributes.write']
self.service.uaa.uaac.update_client_grants(client_id, scope=scopes,
authorities=authorities)
return self.service.uaa.uaac.get_client(client_id) | def function[grant_client, parameter[self, client_id]]:
constant[
Grant the given client id all the scopes and authorities
needed to work with the access control service.
]
variable[zone] assign[=] call[call[name[self].service.settings.data][constant[zone]]][constant[oauth-scope]]
variable[scopes] assign[=] list[[<ast.Constant object at 0x7da18f00fe50>, <ast.Name object at 0x7da18f00f490>, <ast.Constant object at 0x7da18f00f550>, <ast.Constant object at 0x7da18f00d570>, <ast.Constant object at 0x7da18f00e440>, <ast.Constant object at 0x7da18f00c970>]]
variable[authorities] assign[=] list[[<ast.Constant object at 0x7da18f00d0f0>, <ast.Name object at 0x7da18f00d600>, <ast.Constant object at 0x7da18f00c8e0>, <ast.Constant object at 0x7da18f00fdf0>, <ast.Constant object at 0x7da18f00dcf0>, <ast.Constant object at 0x7da18f00f160>]]
call[name[self].service.uaa.uaac.update_client_grants, parameter[name[client_id]]]
return[call[name[self].service.uaa.uaac.get_client, parameter[name[client_id]]]] | keyword[def] identifier[grant_client] ( identifier[self] , identifier[client_id] ):
literal[string]
identifier[zone] = identifier[self] . identifier[service] . identifier[settings] . identifier[data] [ literal[string] ][ literal[string] ]
identifier[scopes] =[ literal[string] , identifier[zone] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ]
identifier[authorities] =[ literal[string] , identifier[zone] ,
literal[string] , literal[string] ,
literal[string] , literal[string] ]
identifier[self] . identifier[service] . identifier[uaa] . identifier[uaac] . identifier[update_client_grants] ( identifier[client_id] , identifier[scope] = identifier[scopes] ,
identifier[authorities] = identifier[authorities] )
keyword[return] identifier[self] . identifier[service] . identifier[uaa] . identifier[uaac] . identifier[get_client] ( identifier[client_id] ) | def grant_client(self, client_id):
"""
Grant the given client id all the scopes and authorities
needed to work with the access control service.
"""
zone = self.service.settings.data['zone']['oauth-scope']
scopes = ['openid', zone, 'acs.policies.read', 'acs.attributes.read', 'acs.policies.write', 'acs.attributes.write']
authorities = ['uaa.resource', zone, 'acs.policies.read', 'acs.policies.write', 'acs.attributes.read', 'acs.attributes.write']
self.service.uaa.uaac.update_client_grants(client_id, scope=scopes, authorities=authorities)
return self.service.uaa.uaac.get_client(client_id) |
def check_address(address):
"""
verify that a string is a base58check address
>>> check_address('16EMaNw3pkn3v6f2BgnSSs53zAKH4Q8YJg')
True
>>> check_address('16EMaNw3pkn3v6f2BgnSSs53zAKH4Q8YJh')
False
>>> check_address('mkkJsS22dnDJhD8duFkpGnHNr9uz3JEcWu')
True
>>> check_address('mkkJsS22dnDJhD8duFkpGnHNr9uz3JEcWv')
False
>>> check_address('MD8WooqTKmwromdMQfSNh8gPTPCSf8KaZj')
True
>>> check_address('SSXMcDiCZ7yFSQSUj7mWzmDcdwYhq97p2i')
True
>>> check_address('SSXMcDiCZ7yFSQSUj7mWzmDcdwYhq97p2j')
False
>>> check_address('16SuThrz')
False
>>> check_address('1TGKrgtrQjgoPjoa5BnUZ9Qu')
False
>>> check_address('1LPckRbeTfLjzrfTfnCtP7z2GxFTpZLafXi')
True
"""
if not check_string(address, min_length=26, max_length=35, pattern=OP_ADDRESS_PATTERN):
return False
try:
keylib.b58check_decode(address)
return True
except:
return False | def function[check_address, parameter[address]]:
constant[
verify that a string is a base58check address
>>> check_address('16EMaNw3pkn3v6f2BgnSSs53zAKH4Q8YJg')
True
>>> check_address('16EMaNw3pkn3v6f2BgnSSs53zAKH4Q8YJh')
False
>>> check_address('mkkJsS22dnDJhD8duFkpGnHNr9uz3JEcWu')
True
>>> check_address('mkkJsS22dnDJhD8duFkpGnHNr9uz3JEcWv')
False
>>> check_address('MD8WooqTKmwromdMQfSNh8gPTPCSf8KaZj')
True
>>> check_address('SSXMcDiCZ7yFSQSUj7mWzmDcdwYhq97p2i')
True
>>> check_address('SSXMcDiCZ7yFSQSUj7mWzmDcdwYhq97p2j')
False
>>> check_address('16SuThrz')
False
>>> check_address('1TGKrgtrQjgoPjoa5BnUZ9Qu')
False
>>> check_address('1LPckRbeTfLjzrfTfnCtP7z2GxFTpZLafXi')
True
]
if <ast.UnaryOp object at 0x7da20c6aa290> begin[:]
return[constant[False]]
<ast.Try object at 0x7da20e9631c0> | keyword[def] identifier[check_address] ( identifier[address] ):
literal[string]
keyword[if] keyword[not] identifier[check_string] ( identifier[address] , identifier[min_length] = literal[int] , identifier[max_length] = literal[int] , identifier[pattern] = identifier[OP_ADDRESS_PATTERN] ):
keyword[return] keyword[False]
keyword[try] :
identifier[keylib] . identifier[b58check_decode] ( identifier[address] )
keyword[return] keyword[True]
keyword[except] :
keyword[return] keyword[False] | def check_address(address):
"""
verify that a string is a base58check address
>>> check_address('16EMaNw3pkn3v6f2BgnSSs53zAKH4Q8YJg')
True
>>> check_address('16EMaNw3pkn3v6f2BgnSSs53zAKH4Q8YJh')
False
>>> check_address('mkkJsS22dnDJhD8duFkpGnHNr9uz3JEcWu')
True
>>> check_address('mkkJsS22dnDJhD8duFkpGnHNr9uz3JEcWv')
False
>>> check_address('MD8WooqTKmwromdMQfSNh8gPTPCSf8KaZj')
True
>>> check_address('SSXMcDiCZ7yFSQSUj7mWzmDcdwYhq97p2i')
True
>>> check_address('SSXMcDiCZ7yFSQSUj7mWzmDcdwYhq97p2j')
False
>>> check_address('16SuThrz')
False
>>> check_address('1TGKrgtrQjgoPjoa5BnUZ9Qu')
False
>>> check_address('1LPckRbeTfLjzrfTfnCtP7z2GxFTpZLafXi')
True
"""
if not check_string(address, min_length=26, max_length=35, pattern=OP_ADDRESS_PATTERN):
return False # depends on [control=['if'], data=[]]
try:
keylib.b58check_decode(address)
return True # depends on [control=['try'], data=[]]
except:
return False # depends on [control=['except'], data=[]] |
def generate_http_basic_token(username, password):
"""
Generates a HTTP basic token from username and password
Returns a token string (not a byte)
"""
token = base64.b64encode('{}:{}'.format(username, password).encode('utf-8')).decode('utf-8')
return token | def function[generate_http_basic_token, parameter[username, password]]:
constant[
Generates a HTTP basic token from username and password
Returns a token string (not a byte)
]
variable[token] assign[=] call[call[name[base64].b64encode, parameter[call[call[constant[{}:{}].format, parameter[name[username], name[password]]].encode, parameter[constant[utf-8]]]]].decode, parameter[constant[utf-8]]]
return[name[token]] | keyword[def] identifier[generate_http_basic_token] ( identifier[username] , identifier[password] ):
literal[string]
identifier[token] = identifier[base64] . identifier[b64encode] ( literal[string] . identifier[format] ( identifier[username] , identifier[password] ). identifier[encode] ( literal[string] )). identifier[decode] ( literal[string] )
keyword[return] identifier[token] | def generate_http_basic_token(username, password):
"""
Generates a HTTP basic token from username and password
Returns a token string (not a byte)
"""
token = base64.b64encode('{}:{}'.format(username, password).encode('utf-8')).decode('utf-8')
return token |
def record_add_fields(rec, tag, fields, field_position_local=None,
field_position_global=None):
"""
Add the fields into the record at the required position.
The position is specified by the tag and the field_position_local in the
list of fields.
:param rec: a record structure
:param tag: the tag of the fields to be moved
:param field_position_local: the field_position_local to which the field
will be inserted. If not specified, appends
the fields to the tag.
:param a: list of fields to be added
:return: -1 if the operation failed, or the field_position_local if it was
successful
"""
if field_position_local is None and field_position_global is None:
for field in fields:
record_add_field(
rec, tag, ind1=field[1],
ind2=field[2], subfields=field[0],
controlfield_value=field[3])
else:
fields.reverse()
for field in fields:
record_add_field(
rec, tag, ind1=field[1], ind2=field[2],
subfields=field[0], controlfield_value=field[3],
field_position_local=field_position_local,
field_position_global=field_position_global)
return field_position_local | def function[record_add_fields, parameter[rec, tag, fields, field_position_local, field_position_global]]:
constant[
Add the fields into the record at the required position.
The position is specified by the tag and the field_position_local in the
list of fields.
:param rec: a record structure
:param tag: the tag of the fields to be moved
:param field_position_local: the field_position_local to which the field
will be inserted. If not specified, appends
the fields to the tag.
:param a: list of fields to be added
:return: -1 if the operation failed, or the field_position_local if it was
successful
]
if <ast.BoolOp object at 0x7da204566200> begin[:]
for taget[name[field]] in starred[name[fields]] begin[:]
call[name[record_add_field], parameter[name[rec], name[tag]]]
return[name[field_position_local]] | keyword[def] identifier[record_add_fields] ( identifier[rec] , identifier[tag] , identifier[fields] , identifier[field_position_local] = keyword[None] ,
identifier[field_position_global] = keyword[None] ):
literal[string]
keyword[if] identifier[field_position_local] keyword[is] keyword[None] keyword[and] identifier[field_position_global] keyword[is] keyword[None] :
keyword[for] identifier[field] keyword[in] identifier[fields] :
identifier[record_add_field] (
identifier[rec] , identifier[tag] , identifier[ind1] = identifier[field] [ literal[int] ],
identifier[ind2] = identifier[field] [ literal[int] ], identifier[subfields] = identifier[field] [ literal[int] ],
identifier[controlfield_value] = identifier[field] [ literal[int] ])
keyword[else] :
identifier[fields] . identifier[reverse] ()
keyword[for] identifier[field] keyword[in] identifier[fields] :
identifier[record_add_field] (
identifier[rec] , identifier[tag] , identifier[ind1] = identifier[field] [ literal[int] ], identifier[ind2] = identifier[field] [ literal[int] ],
identifier[subfields] = identifier[field] [ literal[int] ], identifier[controlfield_value] = identifier[field] [ literal[int] ],
identifier[field_position_local] = identifier[field_position_local] ,
identifier[field_position_global] = identifier[field_position_global] )
keyword[return] identifier[field_position_local] | def record_add_fields(rec, tag, fields, field_position_local=None, field_position_global=None):
"""
Add the fields into the record at the required position.
The position is specified by the tag and the field_position_local in the
list of fields.
:param rec: a record structure
:param tag: the tag of the fields to be moved
:param field_position_local: the field_position_local to which the field
will be inserted. If not specified, appends
the fields to the tag.
:param a: list of fields to be added
:return: -1 if the operation failed, or the field_position_local if it was
successful
"""
if field_position_local is None and field_position_global is None:
for field in fields:
record_add_field(rec, tag, ind1=field[1], ind2=field[2], subfields=field[0], controlfield_value=field[3]) # depends on [control=['for'], data=['field']] # depends on [control=['if'], data=[]]
else:
fields.reverse()
for field in fields:
record_add_field(rec, tag, ind1=field[1], ind2=field[2], subfields=field[0], controlfield_value=field[3], field_position_local=field_position_local, field_position_global=field_position_global) # depends on [control=['for'], data=['field']]
return field_position_local |
def get_blank_row(self, filler="-", splitter="+"):
"""Gets blank row
:param filler: Fill empty columns with this char
:param splitter: Separate columns with this char
:return: Pretty formatted blank row (with no meaningful data in it)
"""
return self.get_pretty_row(
["" for _ in self.widths], # blanks
filler, # fill with this
splitter, # split columns with this
) | def function[get_blank_row, parameter[self, filler, splitter]]:
constant[Gets blank row
:param filler: Fill empty columns with this char
:param splitter: Separate columns with this char
:return: Pretty formatted blank row (with no meaningful data in it)
]
return[call[name[self].get_pretty_row, parameter[<ast.ListComp object at 0x7da1b1da5150>, name[filler], name[splitter]]]] | keyword[def] identifier[get_blank_row] ( identifier[self] , identifier[filler] = literal[string] , identifier[splitter] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[get_pretty_row] (
[ literal[string] keyword[for] identifier[_] keyword[in] identifier[self] . identifier[widths] ],
identifier[filler] ,
identifier[splitter] ,
) | def get_blank_row(self, filler='-', splitter='+'):
"""Gets blank row
:param filler: Fill empty columns with this char
:param splitter: Separate columns with this char
:return: Pretty formatted blank row (with no meaningful data in it)
""" # blanks
# fill with this
# split columns with this
return self.get_pretty_row(['' for _ in self.widths], filler, splitter) |
def confirm(message: Text,
default: bool = True,
qmark: Text = DEFAULT_QUESTION_PREFIX,
style: Optional[Style] = None,
**kwargs: Any) -> Question:
"""Prompt the user to confirm or reject.
This question type can be used to prompt the user for a confirmation
of a yes-or-no question. If the user just hits enter, the default
value will be returned.
Args:
message: Question text
default: Default value will be returned if the user just hits
enter.
qmark: Question prefix displayed in front of the question.
By default this is a `?`
style: A custom color and style for the question parts. You can
configure colors as well as font types for different elements.
Returns:
Question: Question instance, ready to be prompted (using `.ask()`).
"""
merged_style = merge_styles([DEFAULT_STYLE, style])
status = {'answer': None}
def get_prompt_tokens():
tokens = []
tokens.append(("class:qmark", qmark))
tokens.append(("class:question", ' {} '.format(message)))
if status['answer'] is not None:
answer = ' {}'.format(YES if status['answer'] else NO)
tokens.append(("class:answer", answer))
else:
instruction = ' {}'.format(YES_OR_NO if default else NO_OR_YES)
tokens.append(("class:instruction", instruction))
return to_formatted_text(tokens)
bindings = KeyBindings()
@bindings.add(Keys.ControlQ, eager=True)
@bindings.add(Keys.ControlC, eager=True)
def _(event):
event.app.exit(exception=KeyboardInterrupt, style='class:aborting')
@bindings.add('n')
@bindings.add('N')
def key_n(event):
status['answer'] = False
event.app.exit(result=False)
@bindings.add('y')
@bindings.add('Y')
def key_y(event):
status['answer'] = True
event.app.exit(result=True)
@bindings.add(Keys.ControlM, eager=True)
def set_answer(event):
status['answer'] = default
event.app.exit(result=default)
@bindings.add(Keys.Any)
def other(event):
"""Disallow inserting other text."""
pass
return Question(PromptSession(get_prompt_tokens,
key_bindings=bindings,
style=merged_style,
**kwargs).app) | def function[confirm, parameter[message, default, qmark, style]]:
constant[Prompt the user to confirm or reject.
This question type can be used to prompt the user for a confirmation
of a yes-or-no question. If the user just hits enter, the default
value will be returned.
Args:
message: Question text
default: Default value will be returned if the user just hits
enter.
qmark: Question prefix displayed in front of the question.
By default this is a `?`
style: A custom color and style for the question parts. You can
configure colors as well as font types for different elements.
Returns:
Question: Question instance, ready to be prompted (using `.ask()`).
]
variable[merged_style] assign[=] call[name[merge_styles], parameter[list[[<ast.Name object at 0x7da1b07927a0>, <ast.Name object at 0x7da1b07909a0>]]]]
variable[status] assign[=] dictionary[[<ast.Constant object at 0x7da1b0790a00>], [<ast.Constant object at 0x7da1b07924d0>]]
def function[get_prompt_tokens, parameter[]]:
variable[tokens] assign[=] list[[]]
call[name[tokens].append, parameter[tuple[[<ast.Constant object at 0x7da1b0791300>, <ast.Name object at 0x7da1b07913c0>]]]]
call[name[tokens].append, parameter[tuple[[<ast.Constant object at 0x7da1b0791360>, <ast.Call object at 0x7da1b0791600>]]]]
if compare[call[name[status]][constant[answer]] is_not constant[None]] begin[:]
variable[answer] assign[=] call[constant[ {}].format, parameter[<ast.IfExp object at 0x7da1b0791f30>]]
call[name[tokens].append, parameter[tuple[[<ast.Constant object at 0x7da1b0791f90>, <ast.Name object at 0x7da1b0791ff0>]]]]
return[call[name[to_formatted_text], parameter[name[tokens]]]]
variable[bindings] assign[=] call[name[KeyBindings], parameter[]]
def function[_, parameter[event]]:
call[name[event].app.exit, parameter[]]
def function[key_n, parameter[event]]:
call[name[status]][constant[answer]] assign[=] constant[False]
call[name[event].app.exit, parameter[]]
def function[key_y, parameter[event]]:
call[name[status]][constant[answer]] assign[=] constant[True]
call[name[event].app.exit, parameter[]]
def function[set_answer, parameter[event]]:
call[name[status]][constant[answer]] assign[=] name[default]
call[name[event].app.exit, parameter[]]
def function[other, parameter[event]]:
constant[Disallow inserting other text.]
pass
return[call[name[Question], parameter[call[name[PromptSession], parameter[name[get_prompt_tokens]]].app]]] | keyword[def] identifier[confirm] ( identifier[message] : identifier[Text] ,
identifier[default] : identifier[bool] = keyword[True] ,
identifier[qmark] : identifier[Text] = identifier[DEFAULT_QUESTION_PREFIX] ,
identifier[style] : identifier[Optional] [ identifier[Style] ]= keyword[None] ,
** identifier[kwargs] : identifier[Any] )-> identifier[Question] :
literal[string]
identifier[merged_style] = identifier[merge_styles] ([ identifier[DEFAULT_STYLE] , identifier[style] ])
identifier[status] ={ literal[string] : keyword[None] }
keyword[def] identifier[get_prompt_tokens] ():
identifier[tokens] =[]
identifier[tokens] . identifier[append] (( literal[string] , identifier[qmark] ))
identifier[tokens] . identifier[append] (( literal[string] , literal[string] . identifier[format] ( identifier[message] )))
keyword[if] identifier[status] [ literal[string] ] keyword[is] keyword[not] keyword[None] :
identifier[answer] = literal[string] . identifier[format] ( identifier[YES] keyword[if] identifier[status] [ literal[string] ] keyword[else] identifier[NO] )
identifier[tokens] . identifier[append] (( literal[string] , identifier[answer] ))
keyword[else] :
identifier[instruction] = literal[string] . identifier[format] ( identifier[YES_OR_NO] keyword[if] identifier[default] keyword[else] identifier[NO_OR_YES] )
identifier[tokens] . identifier[append] (( literal[string] , identifier[instruction] ))
keyword[return] identifier[to_formatted_text] ( identifier[tokens] )
identifier[bindings] = identifier[KeyBindings] ()
@ identifier[bindings] . identifier[add] ( identifier[Keys] . identifier[ControlQ] , identifier[eager] = keyword[True] )
@ identifier[bindings] . identifier[add] ( identifier[Keys] . identifier[ControlC] , identifier[eager] = keyword[True] )
keyword[def] identifier[_] ( identifier[event] ):
identifier[event] . identifier[app] . identifier[exit] ( identifier[exception] = identifier[KeyboardInterrupt] , identifier[style] = literal[string] )
@ identifier[bindings] . identifier[add] ( literal[string] )
@ identifier[bindings] . identifier[add] ( literal[string] )
keyword[def] identifier[key_n] ( identifier[event] ):
identifier[status] [ literal[string] ]= keyword[False]
identifier[event] . identifier[app] . identifier[exit] ( identifier[result] = keyword[False] )
@ identifier[bindings] . identifier[add] ( literal[string] )
@ identifier[bindings] . identifier[add] ( literal[string] )
keyword[def] identifier[key_y] ( identifier[event] ):
identifier[status] [ literal[string] ]= keyword[True]
identifier[event] . identifier[app] . identifier[exit] ( identifier[result] = keyword[True] )
@ identifier[bindings] . identifier[add] ( identifier[Keys] . identifier[ControlM] , identifier[eager] = keyword[True] )
keyword[def] identifier[set_answer] ( identifier[event] ):
identifier[status] [ literal[string] ]= identifier[default]
identifier[event] . identifier[app] . identifier[exit] ( identifier[result] = identifier[default] )
@ identifier[bindings] . identifier[add] ( identifier[Keys] . identifier[Any] )
keyword[def] identifier[other] ( identifier[event] ):
literal[string]
keyword[pass]
keyword[return] identifier[Question] ( identifier[PromptSession] ( identifier[get_prompt_tokens] ,
identifier[key_bindings] = identifier[bindings] ,
identifier[style] = identifier[merged_style] ,
** identifier[kwargs] ). identifier[app] ) | def confirm(message: Text, default: bool=True, qmark: Text=DEFAULT_QUESTION_PREFIX, style: Optional[Style]=None, **kwargs: Any) -> Question:
"""Prompt the user to confirm or reject.
This question type can be used to prompt the user for a confirmation
of a yes-or-no question. If the user just hits enter, the default
value will be returned.
Args:
message: Question text
default: Default value will be returned if the user just hits
enter.
qmark: Question prefix displayed in front of the question.
By default this is a `?`
style: A custom color and style for the question parts. You can
configure colors as well as font types for different elements.
Returns:
Question: Question instance, ready to be prompted (using `.ask()`).
"""
merged_style = merge_styles([DEFAULT_STYLE, style])
status = {'answer': None}
def get_prompt_tokens():
tokens = []
tokens.append(('class:qmark', qmark))
tokens.append(('class:question', ' {} '.format(message)))
if status['answer'] is not None:
answer = ' {}'.format(YES if status['answer'] else NO)
tokens.append(('class:answer', answer)) # depends on [control=['if'], data=[]]
else:
instruction = ' {}'.format(YES_OR_NO if default else NO_OR_YES)
tokens.append(('class:instruction', instruction))
return to_formatted_text(tokens)
bindings = KeyBindings()
@bindings.add(Keys.ControlQ, eager=True)
@bindings.add(Keys.ControlC, eager=True)
def _(event):
event.app.exit(exception=KeyboardInterrupt, style='class:aborting')
@bindings.add('n')
@bindings.add('N')
def key_n(event):
status['answer'] = False
event.app.exit(result=False)
@bindings.add('y')
@bindings.add('Y')
def key_y(event):
status['answer'] = True
event.app.exit(result=True)
@bindings.add(Keys.ControlM, eager=True)
def set_answer(event):
status['answer'] = default
event.app.exit(result=default)
@bindings.add(Keys.Any)
def other(event):
"""Disallow inserting other text."""
pass
return Question(PromptSession(get_prompt_tokens, key_bindings=bindings, style=merged_style, **kwargs).app) |
def detach_usage_plan_from_apis(plan_id, apis, region=None, key=None, keyid=None, profile=None):
'''
Detaches given usage plan from each of the apis provided in a list of apiId and stage value
.. versionadded:: 2017.7.0
apis
a list of dictionaries, where each dictionary contains the following:
apiId
a string, which is the id of the created API in AWS ApiGateway
stage
a string, which is the stage that the created API is deployed to.
CLI Example:
.. code-block:: bash
salt myminion boto_apigateway.detach_usage_plan_to_apis plan_id='usage plan id' apis='[{"apiId": "some id 1", "stage": "some stage 1"}]'
'''
return _update_usage_plan_apis(plan_id, apis, 'remove', region=region, key=key, keyid=keyid, profile=profile) | def function[detach_usage_plan_from_apis, parameter[plan_id, apis, region, key, keyid, profile]]:
constant[
Detaches given usage plan from each of the apis provided in a list of apiId and stage value
.. versionadded:: 2017.7.0
apis
a list of dictionaries, where each dictionary contains the following:
apiId
a string, which is the id of the created API in AWS ApiGateway
stage
a string, which is the stage that the created API is deployed to.
CLI Example:
.. code-block:: bash
salt myminion boto_apigateway.detach_usage_plan_to_apis plan_id='usage plan id' apis='[{"apiId": "some id 1", "stage": "some stage 1"}]'
]
return[call[name[_update_usage_plan_apis], parameter[name[plan_id], name[apis], constant[remove]]]] | keyword[def] identifier[detach_usage_plan_from_apis] ( identifier[plan_id] , identifier[apis] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
keyword[return] identifier[_update_usage_plan_apis] ( identifier[plan_id] , identifier[apis] , literal[string] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) | def detach_usage_plan_from_apis(plan_id, apis, region=None, key=None, keyid=None, profile=None):
"""
Detaches given usage plan from each of the apis provided in a list of apiId and stage value
.. versionadded:: 2017.7.0
apis
a list of dictionaries, where each dictionary contains the following:
apiId
a string, which is the id of the created API in AWS ApiGateway
stage
a string, which is the stage that the created API is deployed to.
CLI Example:
.. code-block:: bash
salt myminion boto_apigateway.detach_usage_plan_to_apis plan_id='usage plan id' apis='[{"apiId": "some id 1", "stage": "some stage 1"}]'
"""
return _update_usage_plan_apis(plan_id, apis, 'remove', region=region, key=key, keyid=keyid, profile=profile) |
def set_data_matrix_chunk_size(df_shape, max_chunk_kb, elem_per_kb):
"""
Sets chunk size to use for writing data matrix.
Note. Calculation used here is for compatibility with cmapM and cmapR.
Input:
- df_shape (tuple): shape of input data_df.
- max_chunk_kb (int, default=1024): The maximum number of KB a given chunk will occupy
- elem_per_kb (int): Number of elements per kb
Returns:
chunk size (tuple) to use for chunking the data matrix
"""
row_chunk_size = min(df_shape[0], 1000)
col_chunk_size = min(((max_chunk_kb*elem_per_kb)//row_chunk_size), df_shape[1])
return (row_chunk_size, col_chunk_size) | def function[set_data_matrix_chunk_size, parameter[df_shape, max_chunk_kb, elem_per_kb]]:
constant[
Sets chunk size to use for writing data matrix.
Note. Calculation used here is for compatibility with cmapM and cmapR.
Input:
- df_shape (tuple): shape of input data_df.
- max_chunk_kb (int, default=1024): The maximum number of KB a given chunk will occupy
- elem_per_kb (int): Number of elements per kb
Returns:
chunk size (tuple) to use for chunking the data matrix
]
variable[row_chunk_size] assign[=] call[name[min], parameter[call[name[df_shape]][constant[0]], constant[1000]]]
variable[col_chunk_size] assign[=] call[name[min], parameter[binary_operation[binary_operation[name[max_chunk_kb] * name[elem_per_kb]] <ast.FloorDiv object at 0x7da2590d6bc0> name[row_chunk_size]], call[name[df_shape]][constant[1]]]]
return[tuple[[<ast.Name object at 0x7da204567c70>, <ast.Name object at 0x7da204565240>]]] | keyword[def] identifier[set_data_matrix_chunk_size] ( identifier[df_shape] , identifier[max_chunk_kb] , identifier[elem_per_kb] ):
literal[string]
identifier[row_chunk_size] = identifier[min] ( identifier[df_shape] [ literal[int] ], literal[int] )
identifier[col_chunk_size] = identifier[min] ((( identifier[max_chunk_kb] * identifier[elem_per_kb] )// identifier[row_chunk_size] ), identifier[df_shape] [ literal[int] ])
keyword[return] ( identifier[row_chunk_size] , identifier[col_chunk_size] ) | def set_data_matrix_chunk_size(df_shape, max_chunk_kb, elem_per_kb):
"""
Sets chunk size to use for writing data matrix.
Note. Calculation used here is for compatibility with cmapM and cmapR.
Input:
- df_shape (tuple): shape of input data_df.
- max_chunk_kb (int, default=1024): The maximum number of KB a given chunk will occupy
- elem_per_kb (int): Number of elements per kb
Returns:
chunk size (tuple) to use for chunking the data matrix
"""
row_chunk_size = min(df_shape[0], 1000)
col_chunk_size = min(max_chunk_kb * elem_per_kb // row_chunk_size, df_shape[1])
return (row_chunk_size, col_chunk_size) |
def resolve_slashed(self, path):
    """ Strip a trailing path separator, resolving symlinked directories.

        A path ending in '/' has the trailing separator(s) removed; if
        the remaining path is a symbolic link, it is replaced by its
        real (resolved) location. Paths without a trailing separator
        are returned unchanged.
    """
    if not path.endswith(os.sep):
        return path
    stripped = path.rstrip(os.sep)
    if not os.path.islink(stripped):
        return stripped
    real = os.path.realpath(stripped)
    self.LOG.debug('Resolved "%s/" to "%s"' % (stripped, real))
    return real
constant[ Resolve symlinked directories if they end in a '/',
remove trailing '/' otherwise.
]
if call[name[path].endswith, parameter[name[os].sep]] begin[:]
variable[path] assign[=] call[name[path].rstrip, parameter[name[os].sep]]
if call[name[os].path.islink, parameter[name[path]]] begin[:]
variable[real] assign[=] call[name[os].path.realpath, parameter[name[path]]]
call[name[self].LOG.debug, parameter[binary_operation[constant[Resolved "%s/" to "%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f720310>, <ast.Name object at 0x7da18f723d60>]]]]]
variable[path] assign[=] name[real]
return[name[path]] | keyword[def] identifier[resolve_slashed] ( identifier[self] , identifier[path] ):
literal[string]
keyword[if] identifier[path] . identifier[endswith] ( identifier[os] . identifier[sep] ):
identifier[path] = identifier[path] . identifier[rstrip] ( identifier[os] . identifier[sep] )
keyword[if] identifier[os] . identifier[path] . identifier[islink] ( identifier[path] ):
identifier[real] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[path] )
identifier[self] . identifier[LOG] . identifier[debug] ( literal[string] %( identifier[path] , identifier[real] ))
identifier[path] = identifier[real]
keyword[return] identifier[path] | def resolve_slashed(self, path):
""" Resolve symlinked directories if they end in a '/',
remove trailing '/' otherwise.
"""
if path.endswith(os.sep):
path = path.rstrip(os.sep)
if os.path.islink(path):
real = os.path.realpath(path)
self.LOG.debug('Resolved "%s/" to "%s"' % (path, real))
path = real # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return path |
def factory(cls, note, fn=None):
    """Register a function as a provider.

    Function (name support is optional)::

        from jeni import Injector as BaseInjector
        from jeni import Provider

        class Injector(BaseInjector):
            pass

        @Injector.factory('echo')
        def echo(name=None):
            return name

    Registration can be a decorator or a direct method call::

        Injector.factory('echo', echo)

    :param note: note to register the provider under
    :param fn: optional function; when given, it is registered
        immediately and returned (direct-call form)
    :return: the registered function, or a decorator when ``fn`` is None
    """
    def decorator(f):
        # Wrap the plain function in a factory provider and register it
        # under the given note.
        provider = cls.factory_provider.bind(f)
        cls.register(note, provider)
        return f
    if fn is not None:
        # Direct-call form. Fixed: previously returned None here, which
        # was inconsistent with the decorator form returning the function.
        return decorator(fn)
    return decorator
constant[Register a function as a provider.
Function (name support is optional)::
from jeni import Injector as BaseInjector
from jeni import Provider
class Injector(BaseInjector):
pass
@Injector.factory('echo')
def echo(name=None):
return name
Registration can be a decorator or a direct method call::
Injector.factory('echo', echo)
]
def function[decorator, parameter[f]]:
variable[provider] assign[=] call[name[cls].factory_provider.bind, parameter[name[f]]]
call[name[cls].register, parameter[name[note], name[provider]]]
return[name[f]]
if compare[name[fn] is_not constant[None]] begin[:]
call[name[decorator], parameter[name[fn]]] | keyword[def] identifier[factory] ( identifier[cls] , identifier[note] , identifier[fn] = keyword[None] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[f] ):
identifier[provider] = identifier[cls] . identifier[factory_provider] . identifier[bind] ( identifier[f] )
identifier[cls] . identifier[register] ( identifier[note] , identifier[provider] )
keyword[return] identifier[f]
keyword[if] identifier[fn] keyword[is] keyword[not] keyword[None] :
identifier[decorator] ( identifier[fn] )
keyword[else] :
keyword[return] identifier[decorator] | def factory(cls, note, fn=None):
"""Register a function as a provider.
Function (name support is optional)::
from jeni import Injector as BaseInjector
from jeni import Provider
class Injector(BaseInjector):
pass
@Injector.factory('echo')
def echo(name=None):
return name
Registration can be a decorator or a direct method call::
Injector.factory('echo', echo)
"""
def decorator(f):
provider = cls.factory_provider.bind(f)
cls.register(note, provider)
return f
if fn is not None:
decorator(fn) # depends on [control=['if'], data=['fn']]
else:
return decorator |
def unauth(request):
    """
    Log the user out and remove all session data.

    :param request: Django HTTP request
    :return: redirect response to the 'main' view
    """
    if request and check_key(request):
        # NOTE(review): `get_api(request)` was previously bound to an
        # unused local; the call is kept in case constructing the API
        # object has side effects -- TODO confirm it can be dropped.
        get_api(request)
        request.session.clear()
        logout(request)
    return HttpResponseRedirect(reverse('main'))
constant[
logout and remove all session data
]
if call[name[check_key], parameter[name[request]]] begin[:]
variable[api] assign[=] call[name[get_api], parameter[name[request]]]
call[name[request].session.clear, parameter[]]
call[name[logout], parameter[name[request]]]
return[call[name[HttpResponseRedirect], parameter[call[name[reverse], parameter[constant[main]]]]]] | keyword[def] identifier[unauth] ( identifier[request] ):
literal[string]
keyword[if] identifier[check_key] ( identifier[request] ):
identifier[api] = identifier[get_api] ( identifier[request] )
identifier[request] . identifier[session] . identifier[clear] ()
identifier[logout] ( identifier[request] )
keyword[return] identifier[HttpResponseRedirect] ( identifier[reverse] ( literal[string] )) | def unauth(request):
"""
logout and remove all session data
"""
if check_key(request):
api = get_api(request)
request.session.clear()
logout(request) # depends on [control=['if'], data=[]]
return HttpResponseRedirect(reverse('main')) |
def getRankMaps(self):
    """
    Returns a list of dictionaries, one per preference, each mapping the
    integer representation of a candidate to its position in that
    preference's ranking, starting from 1.
    """
    return [preference.getRankMap() for preference in self.preferences]
constant[
Returns a list of dictionaries, one for each preference, that associates the integer
representation of each candidate with its position in the ranking, starting from 1 and
returns a list of the number of times each preference is given.
]
variable[rankMaps] assign[=] list[[]]
for taget[name[preference]] in starred[name[self].preferences] begin[:]
call[name[rankMaps].append, parameter[call[name[preference].getRankMap, parameter[]]]]
return[name[rankMaps]] | keyword[def] identifier[getRankMaps] ( identifier[self] ):
literal[string]
identifier[rankMaps] =[]
keyword[for] identifier[preference] keyword[in] identifier[self] . identifier[preferences] :
identifier[rankMaps] . identifier[append] ( identifier[preference] . identifier[getRankMap] ())
keyword[return] identifier[rankMaps] | def getRankMaps(self):
"""
Returns a list of dictionaries, one for each preference, that associates the integer
representation of each candidate with its position in the ranking, starting from 1 and
returns a list of the number of times each preference is given.
"""
rankMaps = []
for preference in self.preferences:
rankMaps.append(preference.getRankMap()) # depends on [control=['for'], data=['preference']]
return rankMaps |
def execute_rex_code(self, code, filename=None, shell=None,
                     parent_environ=None, **Popen_args):
    """Run some rex code in the context.

    Note:
        This is just a convenience form of `execute_shell`.

    Args:
        code (str): Rex code to execute.
        filename (str): Filename to report if there are syntax errors.
        shell: Shell type, for eg 'bash'. If None, the current shell type
            is used.
        parent_environ: Environment to run the shell process in, if None
            then the current environment is used.
        Popen_args: args to pass to the shell process object constructor.

    Returns:
        `subprocess.Popen` object for the shell process.
    """
    def run_code(executor):
        # Inject the rex code into the shell setup instead of running
        # a command.
        executor.execute_code(code, filename=filename)

    return self.execute_shell(shell=shell,
                              parent_environ=parent_environ,
                              command='',  # don't run any command
                              block=False,
                              actions_callback=run_code,
                              **Popen_args)
constant[Run some rex code in the context.
Note:
This is just a convenience form of `execute_shell`.
Args:
code (str): Rex code to execute.
filename (str): Filename to report if there are syntax errors.
shell: Shell type, for eg 'bash'. If None, the current shell type
is used.
parent_environ: Environment to run the shell process in, if None
then the current environment is used.
Popen_args: args to pass to the shell process object constructor.
Returns:
`subprocess.Popen` object for the shell process.
]
def function[_actions_callback, parameter[executor]]:
call[name[executor].execute_code, parameter[name[code]]]
return[call[name[self].execute_shell, parameter[]]] | keyword[def] identifier[execute_rex_code] ( identifier[self] , identifier[code] , identifier[filename] = keyword[None] , identifier[shell] = keyword[None] ,
identifier[parent_environ] = keyword[None] ,** identifier[Popen_args] ):
literal[string]
keyword[def] identifier[_actions_callback] ( identifier[executor] ):
identifier[executor] . identifier[execute_code] ( identifier[code] , identifier[filename] = identifier[filename] )
keyword[return] identifier[self] . identifier[execute_shell] ( identifier[shell] = identifier[shell] ,
identifier[parent_environ] = identifier[parent_environ] ,
identifier[command] = literal[string] ,
identifier[block] = keyword[False] ,
identifier[actions_callback] = identifier[_actions_callback] ,
** identifier[Popen_args] ) | def execute_rex_code(self, code, filename=None, shell=None, parent_environ=None, **Popen_args):
"""Run some rex code in the context.
Note:
This is just a convenience form of `execute_shell`.
Args:
code (str): Rex code to execute.
filename (str): Filename to report if there are syntax errors.
shell: Shell type, for eg 'bash'. If None, the current shell type
is used.
parent_environ: Environment to run the shell process in, if None
then the current environment is used.
Popen_args: args to pass to the shell process object constructor.
Returns:
`subprocess.Popen` object for the shell process.
"""
def _actions_callback(executor):
executor.execute_code(code, filename=filename) # don't run any command
return self.execute_shell(shell=shell, parent_environ=parent_environ, command='', block=False, actions_callback=_actions_callback, **Popen_args) |
def add_cert(self, cert):
    """
    Explicitly add a certificate to the store's set of trusted certs.

    @param cert - X509 object to add
    """
    if isinstance(cert, X509):
        libcrypto.X509_STORE_add_cert(self.store, cert.cert)
    else:
        raise TypeError("cert should be X509")
constant[
Explicitely adds certificate to set of trusted in the store
@param cert - X509 object to add
]
if <ast.UnaryOp object at 0x7da1b28b8730> begin[:]
<ast.Raise object at 0x7da1b28ae560>
call[name[libcrypto].X509_STORE_add_cert, parameter[name[self].store, name[cert].cert]] | keyword[def] identifier[add_cert] ( identifier[self] , identifier[cert] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[cert] , identifier[X509] ):
keyword[raise] identifier[TypeError] ( literal[string] )
identifier[libcrypto] . identifier[X509_STORE_add_cert] ( identifier[self] . identifier[store] , identifier[cert] . identifier[cert] ) | def add_cert(self, cert):
"""
Explicitely adds certificate to set of trusted in the store
@param cert - X509 object to add
"""
if not isinstance(cert, X509):
raise TypeError('cert should be X509') # depends on [control=['if'], data=[]]
libcrypto.X509_STORE_add_cert(self.store, cert.cert) |
def create_cells(self, blocks):
    """Turn the list of blocks into a list of notebook cells.

    :param blocks: list of dicts carrying 'type' (compared against
        ``self.code`` / ``self.markdown``) and 'IO' keys
    :return: list of notebook cell objects
    :raises NotImplementedError: for unsupported block types
    """
    cells = []
    for block in blocks:
        if (block['type'] == self.code) and (block['IO'] == 'input'):
            code_cell = self.create_code_cell(block)
            cells.append(code_cell)
        elif (block['type'] == self.code and
              block['IO'] == 'output' and
              cells[-1].cell_type == 'code'):
            # Attach outputs to the immediately preceding code cell.
            cells[-1].outputs = self.create_outputs(block)
        elif block['type'] == self.markdown:
            markdown_cell = self.create_markdown_cell(block)
            cells.append(markdown_cell)
        else:
            # Fixed: the two message fragments previously joined without
            # a space, producing "celltype".
            raise NotImplementedError(
                "{} is not supported as a cell type".format(block['type']))
    return cells
constant[Turn the list of blocks into a list of notebook cells.]
variable[cells] assign[=] list[[]]
for taget[name[block]] in starred[name[blocks]] begin[:]
if <ast.BoolOp object at 0x7da1b1218e20> begin[:]
variable[code_cell] assign[=] call[name[self].create_code_cell, parameter[name[block]]]
call[name[cells].append, parameter[name[code_cell]]]
return[name[cells]] | keyword[def] identifier[create_cells] ( identifier[self] , identifier[blocks] ):
literal[string]
identifier[cells] =[]
keyword[for] identifier[block] keyword[in] identifier[blocks] :
keyword[if] ( identifier[block] [ literal[string] ]== identifier[self] . identifier[code] ) keyword[and] ( identifier[block] [ literal[string] ]== literal[string] ):
identifier[code_cell] = identifier[self] . identifier[create_code_cell] ( identifier[block] )
identifier[cells] . identifier[append] ( identifier[code_cell] )
keyword[elif] ( identifier[block] [ literal[string] ]== identifier[self] . identifier[code] keyword[and]
identifier[block] [ literal[string] ]== literal[string] keyword[and]
identifier[cells] [- literal[int] ]. identifier[cell_type] == literal[string] ):
identifier[cells] [- literal[int] ]. identifier[outputs] = identifier[self] . identifier[create_outputs] ( identifier[block] )
keyword[elif] identifier[block] [ literal[string] ]== identifier[self] . identifier[markdown] :
identifier[markdown_cell] = identifier[self] . identifier[create_markdown_cell] ( identifier[block] )
identifier[cells] . identifier[append] ( identifier[markdown_cell] )
keyword[else] :
keyword[raise] identifier[NotImplementedError] ( literal[string]
literal[string] . identifier[format] ( identifier[block] [ literal[string] ]))
keyword[return] identifier[cells] | def create_cells(self, blocks):
"""Turn the list of blocks into a list of notebook cells."""
cells = []
for block in blocks:
if block['type'] == self.code and block['IO'] == 'input':
code_cell = self.create_code_cell(block)
cells.append(code_cell) # depends on [control=['if'], data=[]]
elif block['type'] == self.code and block['IO'] == 'output' and (cells[-1].cell_type == 'code'):
cells[-1].outputs = self.create_outputs(block) # depends on [control=['if'], data=[]]
elif block['type'] == self.markdown:
markdown_cell = self.create_markdown_cell(block)
cells.append(markdown_cell) # depends on [control=['if'], data=[]]
else:
raise NotImplementedError('{} is not supported as a celltype'.format(block['type'])) # depends on [control=['for'], data=['block']]
return cells |
def single(self):
    """
    Fetch one of the related nodes

    :return: Node
    :raises CardinalityViolation: when no related node exists
    """
    matches = super(OneOrMore, self).all()
    if not matches:
        raise CardinalityViolation(self, 'none')
    return matches[0]
constant[
Fetch one of the related nodes
:return: Node
]
variable[nodes] assign[=] call[call[name[super], parameter[name[OneOrMore], name[self]]].all, parameter[]]
if name[nodes] begin[:]
return[call[name[nodes]][constant[0]]]
<ast.Raise object at 0x7da18f09eef0> | keyword[def] identifier[single] ( identifier[self] ):
literal[string]
identifier[nodes] = identifier[super] ( identifier[OneOrMore] , identifier[self] ). identifier[all] ()
keyword[if] identifier[nodes] :
keyword[return] identifier[nodes] [ literal[int] ]
keyword[raise] identifier[CardinalityViolation] ( identifier[self] , literal[string] ) | def single(self):
"""
Fetch one of the related nodes
:return: Node
"""
nodes = super(OneOrMore, self).all()
if nodes:
return nodes[0] # depends on [control=['if'], data=[]]
raise CardinalityViolation(self, 'none') |
def _expr2sat(ex, litmap):  # pragma: no cover
    """Convert an expression to a DIMACS SAT string."""
    if isinstance(ex, Literal):
        return str(litmap[ex])
    if isinstance(ex, NotOp):
        return "-(" + _expr2sat(ex.x, litmap) + ")"
    # N-ary operators share one shape: prefix + space-joined args.
    for op_cls, prefix in ((OrOp, "+"), (AndOp, "*"),
                           (XorOp, "xor"), (EqualOp, "=")):
        if isinstance(ex, op_cls):
            args = " ".join(_expr2sat(x, litmap) for x in ex.xs)
            return prefix + "(" + args + ")"
    fstr = ("expected ex to be a Literal or Not/Or/And/Xor/Equal op, "
            "got {0.__name__}")
    raise ValueError(fstr.format(type(ex)))
constant[Convert an expression to a DIMACS SAT string.]
if call[name[isinstance], parameter[name[ex], name[Literal]]] begin[:]
return[call[name[str], parameter[call[name[litmap]][name[ex]]]]] | keyword[def] identifier[_expr2sat] ( identifier[ex] , identifier[litmap] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[ex] , identifier[Literal] ):
keyword[return] identifier[str] ( identifier[litmap] [ identifier[ex] ])
keyword[elif] identifier[isinstance] ( identifier[ex] , identifier[NotOp] ):
keyword[return] literal[string] + identifier[_expr2sat] ( identifier[ex] . identifier[x] , identifier[litmap] )+ literal[string]
keyword[elif] identifier[isinstance] ( identifier[ex] , identifier[OrOp] ):
keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[_expr2sat] ( identifier[x] , identifier[litmap] )
keyword[for] identifier[x] keyword[in] identifier[ex] . identifier[xs] )+ literal[string]
keyword[elif] identifier[isinstance] ( identifier[ex] , identifier[AndOp] ):
keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[_expr2sat] ( identifier[x] , identifier[litmap] )
keyword[for] identifier[x] keyword[in] identifier[ex] . identifier[xs] )+ literal[string]
keyword[elif] identifier[isinstance] ( identifier[ex] , identifier[XorOp] ):
keyword[return] ( literal[string] + literal[string] . identifier[join] ( identifier[_expr2sat] ( identifier[x] , identifier[litmap] )
keyword[for] identifier[x] keyword[in] identifier[ex] . identifier[xs] )+ literal[string] )
keyword[elif] identifier[isinstance] ( identifier[ex] , identifier[EqualOp] ):
keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[_expr2sat] ( identifier[x] , identifier[litmap] )
keyword[for] identifier[x] keyword[in] identifier[ex] . identifier[xs] )+ literal[string]
keyword[else] :
identifier[fstr] =( literal[string]
literal[string] )
keyword[raise] identifier[ValueError] ( identifier[fstr] . identifier[format] ( identifier[type] ( identifier[ex] ))) | def _expr2sat(ex, litmap): # pragma: no cover
'Convert an expression to a DIMACS SAT string.'
if isinstance(ex, Literal):
return str(litmap[ex]) # depends on [control=['if'], data=[]]
elif isinstance(ex, NotOp):
return '-(' + _expr2sat(ex.x, litmap) + ')' # depends on [control=['if'], data=[]]
elif isinstance(ex, OrOp):
return '+(' + ' '.join((_expr2sat(x, litmap) for x in ex.xs)) + ')' # depends on [control=['if'], data=[]]
elif isinstance(ex, AndOp):
return '*(' + ' '.join((_expr2sat(x, litmap) for x in ex.xs)) + ')' # depends on [control=['if'], data=[]]
elif isinstance(ex, XorOp):
return 'xor(' + ' '.join((_expr2sat(x, litmap) for x in ex.xs)) + ')' # depends on [control=['if'], data=[]]
elif isinstance(ex, EqualOp):
return '=(' + ' '.join((_expr2sat(x, litmap) for x in ex.xs)) + ')' # depends on [control=['if'], data=[]]
else:
fstr = 'expected ex to be a Literal or Not/Or/And/Xor/Equal op, got {0.__name__}'
raise ValueError(fstr.format(type(ex))) |
def create_parser(self, prog_name, subcommand):
    """
    Create argument parser and deal with ``add_arguments``.

    This method should not be overridden.

    :param prog_name: Name of the command (argv[0])
    :param subcommand: Subcommand name. NOTE(review): positionally this
        lands in argparse's ``usage`` slot -- confirm that is intended.
    :return: ArgumentParser
    """
    parser = argparse.ArgumentParser(prog_name, subcommand)
    # Add generic arguments here
    self.add_arguments(parser)
    return parser
constant[
Create argument parser and deal with ``add_arguments``.
This method should not be overriden.
:param prog_name: Name of the command (argv[0])
:return: ArgumentParser
]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[name[prog_name], name[subcommand]]]
call[name[self].add_arguments, parameter[name[parser]]]
return[name[parser]] | keyword[def] identifier[create_parser] ( identifier[self] , identifier[prog_name] , identifier[subcommand] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[prog_name] , identifier[subcommand] )
identifier[self] . identifier[add_arguments] ( identifier[parser] )
keyword[return] identifier[parser] | def create_parser(self, prog_name, subcommand):
"""
Create argument parser and deal with ``add_arguments``.
This method should not be overriden.
:param prog_name: Name of the command (argv[0])
:return: ArgumentParser
"""
parser = argparse.ArgumentParser(prog_name, subcommand) # Add generic arguments here
self.add_arguments(parser)
return parser |
def execute_put(self, resource, **kwargs):
    """
    Execute an HTTP PUT request against the API endpoints.

    This method is meant for internal use.

    :param resource: The last part of the URI
    :param kwargs: Additional parameters for the HTTP call (`request` library)
    :return: The HTTP response as JSON or `GhostException` if unsuccessful
    """
    response = self._request(resource, requests.put, **kwargs)
    return response.json()
constant[
Execute an HTTP PUT request against the API endpoints.
This method is meant for internal use.
:param resource: The last part of the URI
:param kwargs: Additional parameters for the HTTP call (`request` library)
:return: The HTTP response as JSON or `GhostException` if unsuccessful
]
return[call[call[name[self]._request, parameter[name[resource], name[requests].put]].json, parameter[]]] | keyword[def] identifier[execute_put] ( identifier[self] , identifier[resource] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[_request] ( identifier[resource] , identifier[requests] . identifier[put] ,** identifier[kwargs] ). identifier[json] () | def execute_put(self, resource, **kwargs):
"""
Execute an HTTP PUT request against the API endpoints.
This method is meant for internal use.
:param resource: The last part of the URI
:param kwargs: Additional parameters for the HTTP call (`request` library)
:return: The HTTP response as JSON or `GhostException` if unsuccessful
"""
return self._request(resource, requests.put, **kwargs).json() |
def strip_columns(tab):
    """Strip leading/trailing whitespace from all string columns of *tab*.

    Modifies *tab* in place; non-string columns are left untouched.

    :param tab: table-like object (e.g. an astropy ``Table``) exposing
        ``colnames`` and column access by name
    """
    for colname in tab.colnames:
        # 'S' = bytes columns, 'U' = unicode string columns
        if tab[colname].dtype.kind in ('S', 'U'):
            # np.char.strip is the public API; np.core.defchararray was
            # removed from the public namespace in NumPy 2.0.
            tab[colname] = np.char.strip(tab[colname])
constant[Strip whitespace from string columns.]
for taget[name[colname]] in starred[name[tab].colnames] begin[:]
if compare[call[name[tab]][name[colname]].dtype.kind in list[[<ast.Constant object at 0x7da18eb55270>, <ast.Constant object at 0x7da18eb56a10>]]] begin[:]
call[name[tab]][name[colname]] assign[=] call[name[np].core.defchararray.strip, parameter[call[name[tab]][name[colname]]]] | keyword[def] identifier[strip_columns] ( identifier[tab] ):
literal[string]
keyword[for] identifier[colname] keyword[in] identifier[tab] . identifier[colnames] :
keyword[if] identifier[tab] [ identifier[colname] ]. identifier[dtype] . identifier[kind] keyword[in] [ literal[string] , literal[string] ]:
identifier[tab] [ identifier[colname] ]= identifier[np] . identifier[core] . identifier[defchararray] . identifier[strip] ( identifier[tab] [ identifier[colname] ]) | def strip_columns(tab):
"""Strip whitespace from string columns."""
for colname in tab.colnames:
if tab[colname].dtype.kind in ['S', 'U']:
tab[colname] = np.core.defchararray.strip(tab[colname]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['colname']] |
def sanitize(meta, diagnostics=False):
    """ Try to fix common problems, especially transcode non-standard string encodings.

    :param meta: metafile dict (modified in place)
    :param diagnostics: when True, also report what had to be repaired
    :return: ``meta``, or ``(meta, bad_encodings, bad_fields)`` when
        ``diagnostics`` is True
    """
    # Collects which encodings / fields needed repair, for diagnostics.
    bad_encodings, bad_fields = set(), set()
    def sane_encoding(field, text):
        "Transcoding helper."
        # Try UTF-8 first, then the metafile's declared encoding (if any),
        # then cp1252 as a common last fallback.
        # NOTE(review): assumes *text* is a bytes value -- TODO confirm
        for encoding in ('utf-8', meta.get('encoding', None), 'cp1252'):
            if encoding:
                try:
                    u8_text = text.decode(encoding).encode("utf-8")
                    if encoding != 'utf-8':
                        # Anything that was not already UTF-8 counts as bad.
                        bad_encodings.add(encoding)
                        bad_fields.add(field)
                    return u8_text
                except UnicodeError:
                    continue
        else:
            # for-else: reached only when every candidate encoding failed
            # (there is no break; each success path returns from the loop).
            # Broken beyond anything reasonable
            bad_encodings.add('UNKNOWN/EXOTIC')
            bad_fields.add(field)
            return str(text, 'utf-8', 'replace').replace('\ufffd', '_').encode("utf-8")
    # Go through all string fields and check them
    for field in ("comment", "created by"):
        if field in meta:
            meta[field] = sane_encoding(field, meta[field])
    meta["info"]["name"] = sane_encoding('info name', meta["info"]["name"])
    for entry in meta["info"].get("files", []):
        entry["path"] = [sane_encoding('file path', i) for i in entry["path"]]
    return (meta, bad_encodings, bad_fields) if diagnostics else meta
constant[ Try to fix common problems, especially transcode non-standard string encodings.
]
<ast.Tuple object at 0x7da18ede4d30> assign[=] tuple[[<ast.Call object at 0x7da18ede6710>, <ast.Call object at 0x7da18ede5600>]]
def function[sane_encoding, parameter[field, text]]:
constant[Transcoding helper.]
for taget[name[encoding]] in starred[tuple[[<ast.Constant object at 0x7da18ede7d30>, <ast.Call object at 0x7da18ede7040>, <ast.Constant object at 0x7da18ede5960>]]] begin[:]
if name[encoding] begin[:]
<ast.Try object at 0x7da18ede6bf0>
for taget[name[field]] in starred[tuple[[<ast.Constant object at 0x7da18ede6e30>, <ast.Constant object at 0x7da18ede6440>]]] begin[:]
if compare[name[field] in name[meta]] begin[:]
call[name[meta]][name[field]] assign[=] call[name[sane_encoding], parameter[name[field], call[name[meta]][name[field]]]]
call[call[name[meta]][constant[info]]][constant[name]] assign[=] call[name[sane_encoding], parameter[constant[info name], call[call[name[meta]][constant[info]]][constant[name]]]]
for taget[name[entry]] in starred[call[call[name[meta]][constant[info]].get, parameter[constant[files], list[[]]]]] begin[:]
call[name[entry]][constant[path]] assign[=] <ast.ListComp object at 0x7da18ede6950>
return[<ast.IfExp object at 0x7da18ede6590>] | keyword[def] identifier[sanitize] ( identifier[meta] , identifier[diagnostics] = keyword[False] ):
literal[string]
identifier[bad_encodings] , identifier[bad_fields] = identifier[set] (), identifier[set] ()
keyword[def] identifier[sane_encoding] ( identifier[field] , identifier[text] ):
literal[string]
keyword[for] identifier[encoding] keyword[in] ( literal[string] , identifier[meta] . identifier[get] ( literal[string] , keyword[None] ), literal[string] ):
keyword[if] identifier[encoding] :
keyword[try] :
identifier[u8_text] = identifier[text] . identifier[decode] ( identifier[encoding] ). identifier[encode] ( literal[string] )
keyword[if] identifier[encoding] != literal[string] :
identifier[bad_encodings] . identifier[add] ( identifier[encoding] )
identifier[bad_fields] . identifier[add] ( identifier[field] )
keyword[return] identifier[u8_text]
keyword[except] identifier[UnicodeError] :
keyword[continue]
keyword[else] :
identifier[bad_encodings] . identifier[add] ( literal[string] )
identifier[bad_fields] . identifier[add] ( identifier[field] )
keyword[return] identifier[str] ( identifier[text] , literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[encode] ( literal[string] )
keyword[for] identifier[field] keyword[in] ( literal[string] , literal[string] ):
keyword[if] identifier[field] keyword[in] identifier[meta] :
identifier[meta] [ identifier[field] ]= identifier[sane_encoding] ( identifier[field] , identifier[meta] [ identifier[field] ])
identifier[meta] [ literal[string] ][ literal[string] ]= identifier[sane_encoding] ( literal[string] , identifier[meta] [ literal[string] ][ literal[string] ])
keyword[for] identifier[entry] keyword[in] identifier[meta] [ literal[string] ]. identifier[get] ( literal[string] ,[]):
identifier[entry] [ literal[string] ]=[ identifier[sane_encoding] ( literal[string] , identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[entry] [ literal[string] ]]
keyword[return] ( identifier[meta] , identifier[bad_encodings] , identifier[bad_fields] ) keyword[if] identifier[diagnostics] keyword[else] identifier[meta] | def sanitize(meta, diagnostics=False):
""" Try to fix common problems, especially transcode non-standard string encodings.
"""
(bad_encodings, bad_fields) = (set(), set())
def sane_encoding(field, text):
"""Transcoding helper."""
for encoding in ('utf-8', meta.get('encoding', None), 'cp1252'):
if encoding:
try:
u8_text = text.decode(encoding).encode('utf-8')
if encoding != 'utf-8':
bad_encodings.add(encoding)
bad_fields.add(field) # depends on [control=['if'], data=['encoding']]
return u8_text # depends on [control=['try'], data=[]]
except UnicodeError:
continue # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['encoding']]
else:
# Broken beyond anything reasonable
bad_encodings.add('UNKNOWN/EXOTIC')
bad_fields.add(field)
return str(text, 'utf-8', 'replace').replace('�', '_').encode('utf-8')
# Go through all string fields and check them
for field in ('comment', 'created by'):
if field in meta:
meta[field] = sane_encoding(field, meta[field]) # depends on [control=['if'], data=['field', 'meta']] # depends on [control=['for'], data=['field']]
meta['info']['name'] = sane_encoding('info name', meta['info']['name'])
for entry in meta['info'].get('files', []):
entry['path'] = [sane_encoding('file path', i) for i in entry['path']] # depends on [control=['for'], data=['entry']]
return (meta, bad_encodings, bad_fields) if diagnostics else meta |
def fmt_ces(c, title=None):
    """Format a |CauseEffectStructure|."""
    if not c:
        return '()\n'
    label = 'Cause-effect structure' if title is None else title
    body = '\n'.join(margin(concept) for concept in c) + '\n'
    plural = '' if len(c) == 1 else 's'
    label = '{} ({} concept{})'.format(label, len(c), plural)
    return header(label, body, HEADER_BAR_1, HEADER_BAR_1)
constant[Format a |CauseEffectStructure|.]
if <ast.UnaryOp object at 0x7da1b23466b0> begin[:]
return[constant[()
]]
if compare[name[title] is constant[None]] begin[:]
variable[title] assign[=] constant[Cause-effect structure]
variable[concepts] assign[=] binary_operation[call[constant[
].join, parameter[<ast.GeneratorExp object at 0x7da1b23448b0>]] + constant[
]]
variable[title] assign[=] call[constant[{} ({} concept{})].format, parameter[name[title], call[name[len], parameter[name[c]]], <ast.IfExp object at 0x7da1b2344df0>]]
return[call[name[header], parameter[name[title], name[concepts], name[HEADER_BAR_1], name[HEADER_BAR_1]]]] | keyword[def] identifier[fmt_ces] ( identifier[c] , identifier[title] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[c] :
keyword[return] literal[string]
keyword[if] identifier[title] keyword[is] keyword[None] :
identifier[title] = literal[string]
identifier[concepts] = literal[string] . identifier[join] ( identifier[margin] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[c] )+ literal[string]
identifier[title] = literal[string] . identifier[format] (
identifier[title] , identifier[len] ( identifier[c] ), literal[string] keyword[if] identifier[len] ( identifier[c] )== literal[int] keyword[else] literal[string] )
keyword[return] identifier[header] ( identifier[title] , identifier[concepts] , identifier[HEADER_BAR_1] , identifier[HEADER_BAR_1] ) | def fmt_ces(c, title=None):
"""Format a |CauseEffectStructure|."""
if not c:
return '()\n' # depends on [control=['if'], data=[]]
if title is None:
title = 'Cause-effect structure' # depends on [control=['if'], data=['title']]
concepts = '\n'.join((margin(x) for x in c)) + '\n'
title = '{} ({} concept{})'.format(title, len(c), '' if len(c) == 1 else 's')
return header(title, concepts, HEADER_BAR_1, HEADER_BAR_1) |
async def generate_waifu_insult(self, avatar):
    """Coroutine that renders a "waifu insult" image via the weeb.sh API.
    Parameters:
        avatar: str - http/s url pointing to an image, has to have proper headers and be a direct link to an image
    Returns:
        bytes: the raw image data returned by the API.
    Raises:
        TypeError: if ``avatar`` is not a string.
        Exception: carrying the API error message on any non-200 response."""
    if not isinstance(avatar, str):
        raise TypeError("type of 'avatar' must be str.")
    payload = {"avatar": avatar}
    async with aiohttp.ClientSession() as http:
        async with http.post("https://api.weeb.sh/auto-image/waifu-insult", headers=self.__headers, data=payload) as response:
            if response.status != 200:
                # Surface the API-provided error message to the caller.
                raise Exception((await response.json())['message'])
            return await response.read()
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[avatar] , identifier[str] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[async] keyword[with] identifier[aiohttp] . identifier[ClientSession] () keyword[as] identifier[session] :
keyword[async] keyword[with] identifier[session] . identifier[post] ( literal[string] , identifier[headers] = identifier[self] . identifier[__headers] , identifier[data] ={ literal[string] : identifier[avatar] }) keyword[as] identifier[resp] :
keyword[if] identifier[resp] . identifier[status] == literal[int] :
keyword[return] keyword[await] identifier[resp] . identifier[read] ()
keyword[else] :
keyword[raise] identifier[Exception] (( keyword[await] identifier[resp] . identifier[json] ())[ literal[string] ]) | async def generate_waifu_insult(self, avatar):
"""Generate a waifu insult image.
This function is a coroutine.
Parameters:
avatar: str - http/s url pointing to an image, has to have proper headers and be a direct link to an image
Return Type: image data"""
if not isinstance(avatar, str):
raise TypeError("type of 'avatar' must be str.") # depends on [control=['if'], data=[]]
async with aiohttp.ClientSession() as session:
async with session.post('https://api.weeb.sh/auto-image/waifu-insult', headers=self.__headers, data={'avatar': avatar}) as resp:
if resp.status == 200:
return await resp.read() # depends on [control=['if'], data=[]]
else:
raise Exception((await resp.json())['message']) |
def bake(src):
    """
    Inline all external resources (images, CSS, scripts) of the HTML file
    at *src* and return the resulting ``bs4.BeautifulSoup`` document.
    If *src* is itself an image file, it is first wrapped in a minimal HTML
    document so the image gets inlined like any other.
    Parameters:
        src (str): path to the HTML (or image) source file.
    Returns:
        bs4.BeautifulSoup: the parsed document with resources embedded.
    """
    import html  # stdlib replacement for cgi.escape, removed in Python 3.8
    src = os.path.realpath(src)
    path = os.path.dirname(src)
    filename = os.path.basename(src)
    html_text = _load_file(src).read()
    if imghdr.what("", html_text):
        # The source is an image, not HTML: wrap it so it can be baked.
        # NOTE(review): imghdr.what expects a bytes header; _load_file is
        # presumably opened in binary mode -- confirm.
        html_text = "<html><body><img src='{}'/></body></html>".format(html.escape(filename))
    # Change to the file's directory so image files with relative paths can
    # be loaded correctly; always restore the previous working directory,
    # even if parsing or inlining raises.
    cwd = os.getcwd()
    os.chdir(path)
    try:
        bs_html = bs4.BeautifulSoup(html_text, "html.parser")
        for image in bs_html.find_all("img"):
            _image_to_data(image)
        for link in bs_html.find_all("link"):
            _bake_css(link)
        for script in bs_html.find_all("script"):
            _bake_script(script)
    finally:
        os.chdir(cwd)
    return bs_html
constant[
Runs the encoder on the given source file
]
variable[src] assign[=] call[name[os].path.realpath, parameter[name[src]]]
variable[path] assign[=] call[name[os].path.dirname, parameter[name[src]]]
variable[filename] assign[=] call[name[os].path.basename, parameter[name[src]]]
variable[html] assign[=] call[call[name[_load_file], parameter[name[src]]].read, parameter[]]
if call[name[imghdr].what, parameter[constant[], name[html]]] begin[:]
variable[html] assign[=] call[constant[<html><body><img src='{}'/></body></html>].format, parameter[call[name[cgi].escape, parameter[name[filename]]]]]
variable[cwd] assign[=] call[name[os].getcwd, parameter[]]
call[name[os].chdir, parameter[name[path]]]
variable[bs_html] assign[=] call[name[bs4].BeautifulSoup, parameter[name[html], constant[html.parser]]]
variable[images] assign[=] call[name[bs_html].find_all, parameter[constant[img]]]
for taget[name[image]] in starred[name[images]] begin[:]
call[name[_image_to_data], parameter[name[image]]]
for taget[name[link]] in starred[call[name[bs_html].find_all, parameter[constant[link]]]] begin[:]
call[name[_bake_css], parameter[name[link]]]
for taget[name[script]] in starred[call[name[bs_html].find_all, parameter[constant[script]]]] begin[:]
call[name[_bake_script], parameter[name[script]]]
call[name[os].chdir, parameter[name[cwd]]]
return[name[bs_html]] | keyword[def] identifier[bake] ( identifier[src] ):
literal[string]
identifier[src] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[src] )
identifier[path] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[src] )
identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[src] )
identifier[html] = identifier[_load_file] ( identifier[src] ). identifier[read] ()
keyword[if] identifier[imghdr] . identifier[what] ( literal[string] , identifier[html] ):
identifier[html] = literal[string] . identifier[format] ( identifier[cgi] . identifier[escape] ( identifier[filename] ))
identifier[cwd] = identifier[os] . identifier[getcwd] ()
identifier[os] . identifier[chdir] ( identifier[path] )
identifier[bs_html] = identifier[bs4] . identifier[BeautifulSoup] ( identifier[html] , literal[string] )
identifier[images] = identifier[bs_html] . identifier[find_all] ( literal[string] )
keyword[for] identifier[image] keyword[in] identifier[images] :
identifier[_image_to_data] ( identifier[image] )
keyword[for] identifier[link] keyword[in] identifier[bs_html] . identifier[find_all] ( literal[string] ):
identifier[_bake_css] ( identifier[link] )
keyword[for] identifier[script] keyword[in] identifier[bs_html] . identifier[find_all] ( literal[string] ):
identifier[_bake_script] ( identifier[script] )
identifier[os] . identifier[chdir] ( identifier[cwd] )
keyword[return] identifier[bs_html] | def bake(src):
"""
Runs the encoder on the given source file
"""
src = os.path.realpath(src)
path = os.path.dirname(src)
filename = os.path.basename(src)
html = _load_file(src).read()
if imghdr.what('', html):
html = "<html><body><img src='{}'/></body></html>".format(cgi.escape(filename)) # depends on [control=['if'], data=[]]
# Change to the file's directory so image files with relative paths can be loaded correctly
cwd = os.getcwd()
os.chdir(path)
bs_html = bs4.BeautifulSoup(html, 'html.parser')
images = bs_html.find_all('img')
for image in images:
_image_to_data(image) # depends on [control=['for'], data=['image']]
for link in bs_html.find_all('link'):
_bake_css(link) # depends on [control=['for'], data=['link']]
for script in bs_html.find_all('script'):
_bake_script(script) # depends on [control=['for'], data=['script']]
os.chdir(cwd)
return bs_html |
def generate_enums(outf, enums, msgs):
    """Write a Swift ``enum`` declaration to *outf* for every MAVLink enum.
    Parameters:
        outf: writable file-like object receiving the generated Swift code.
        enums: iterable of enum description objects; each must provide the
            template fields referenced below (``formatted_description``,
            ``swift_name``, ``raw_value_type``, ``name``,
            ``entity_description``, ``all_entities``, ``entities_info`` and
            per-entry fields).
        msgs: unused here; presumably kept for signature parity with the
            other ``generate_*`` helpers -- confirm against callers.
    """
    print("Generating Enums")
    for enum in enums:
        # NOTE(review): ``t`` is assumed to be the module-level template
        # writer that substitutes ${...} fields from ``enum`` -- confirm.
        t.write(outf, """
${formatted_description}public enum ${swift_name}: ${raw_value_type}, Enum {
${{entry:${formatted_description}\tcase ${swift_name} = ${value}\n}}
}
extension ${swift_name} {
public static var typeName = "${name}"
public static var typeDescription = "${entity_description}"
public static var typeDebugDescription: String {
let cases = "\\n\\t".join(allMembers.map { $0.debugDescription })
return "Enum \(typeName): \(typeDescription)\\nMembers:\\n\\t\(cases)"
}
public static var allMembers = [${all_entities}]
public static var membersInfo = [${entities_info}]
}
""", enum)
constant[Iterate through all enums and create Swift equivalents]
call[name[print], parameter[constant[Generating Enums]]]
for taget[name[enum]] in starred[name[enums]] begin[:]
call[name[t].write, parameter[name[outf], constant[
${formatted_description}public enum ${swift_name}: ${raw_value_type}, Enum {
${{entry:${formatted_description} case ${swift_name} = ${value}
}}
}
extension ${swift_name} {
public static var typeName = "${name}"
public static var typeDescription = "${entity_description}"
public static var typeDebugDescription: String {
let cases = "\n\t".join(allMembers.map { $0.debugDescription })
return "Enum \(typeName): \(typeDescription)\nMembers:\n\t\(cases)"
}
public static var allMembers = [${all_entities}]
public static var membersInfo = [${entities_info}]
}
], name[enum]]] | keyword[def] identifier[generate_enums] ( identifier[outf] , identifier[enums] , identifier[msgs] ):
literal[string]
identifier[print] ( literal[string] )
keyword[for] identifier[enum] keyword[in] identifier[enums] :
identifier[t] . identifier[write] ( identifier[outf] , literal[string] , identifier[enum] ) | def generate_enums(outf, enums, msgs):
"""Iterate through all enums and create Swift equivalents"""
print('Generating Enums')
for enum in enums:
t.write(outf, '\n${formatted_description}public enum ${swift_name}: ${raw_value_type}, Enum {\n${{entry:${formatted_description}\tcase ${swift_name} = ${value}\n}}\n}\n\nextension ${swift_name} {\n public static var typeName = "${name}"\n public static var typeDescription = "${entity_description}"\n public static var typeDebugDescription: String {\n let cases = "\\n\\t".join(allMembers.map { $0.debugDescription })\n return "Enum \\(typeName): \\(typeDescription)\\nMembers:\\n\\t\\(cases)"\n }\n public static var allMembers = [${all_entities}]\n public static var membersInfo = [${entities_info}]\n}\n\n', enum) # depends on [control=['for'], data=['enum']] |
def rgb_to_hex(cls, color):
    """
    Convert an ``(r, g, b)`` color tuple to a hexadecimal string.
    Alphabetical characters in the output will be capitalized.
    Args:
        color (tuple): An rgb color tuple of form: (int, int, int)
    Returns: string
    Example:
        >>> SoftColor.rgb_to_hex((0, 0, 0))
        '#000000'
        >>> SoftColor.rgb_to_hex((255, 255, 255))
        '#FFFFFF'
    """
    # Clamp each of the first three channels, then format. The "X"
    # conversion emits uppercase hex digits directly, so no extra
    # .upper() pass is required.
    bounded = [cls._bound_color_value(color[i]) for i in range(3)]
    return '#{:02X}{:02X}{:02X}'.format(*bounded)
constant[
Convert an ``(r, g, b)`` color tuple to a hexadecimal string.
Alphabetical characters in the output will be capitalized.
Args:
color (tuple): An rgb color tuple of form: (int, int, int)
Returns: string
Example:
>>> SoftColor.rgb_to_hex((0, 0, 0))
'#000000'
>>> SoftColor.rgb_to_hex((255, 255, 255))
'#FFFFFF'
]
return[call[call[constant[#{0:02x}{1:02x}{2:02x}].format, parameter[call[name[cls]._bound_color_value, parameter[call[name[color]][constant[0]]]], call[name[cls]._bound_color_value, parameter[call[name[color]][constant[1]]]], call[name[cls]._bound_color_value, parameter[call[name[color]][constant[2]]]]]].upper, parameter[]]] | keyword[def] identifier[rgb_to_hex] ( identifier[cls] , identifier[color] ):
literal[string]
keyword[return] literal[string] . identifier[format] (
identifier[cls] . identifier[_bound_color_value] ( identifier[color] [ literal[int] ]),
identifier[cls] . identifier[_bound_color_value] ( identifier[color] [ literal[int] ]),
identifier[cls] . identifier[_bound_color_value] ( identifier[color] [ literal[int] ])). identifier[upper] () | def rgb_to_hex(cls, color):
"""
Convert an ``(r, g, b)`` color tuple to a hexadecimal string.
Alphabetical characters in the output will be capitalized.
Args:
color (tuple): An rgb color tuple of form: (int, int, int)
Returns: string
Example:
>>> SoftColor.rgb_to_hex((0, 0, 0))
'#000000'
>>> SoftColor.rgb_to_hex((255, 255, 255))
'#FFFFFF'
"""
return '#{0:02x}{1:02x}{2:02x}'.format(cls._bound_color_value(color[0]), cls._bound_color_value(color[1]), cls._bound_color_value(color[2])).upper() |
def recv(self, x=BPF_BUFFER_LENGTH):
    """Receive a frame from the network"""
    # Only hit the BPF device when no frame is already buffered.
    if not self.buffered_frames():
        # Pull a fresh chunk of data from the BPF file descriptor.
        try:
            raw = os.read(self.ins, x)
        except EnvironmentError as exc:
            # EAGAIN just means "nothing available yet" on a non-blocking
            # descriptor; anything else is worth logging.
            if exc.errno != errno.EAGAIN:
                warning("BPF recv()", exc_info=True)
            return
        # Split the chunk into individual frames and buffer them.
        self.extract_frames(raw)
    return self.get_frame()
constant[Receive a frame from the network]
if call[name[self].buffered_frames, parameter[]] begin[:]
return[call[name[self].get_frame, parameter[]]]
<ast.Try object at 0x7da1b1fc9420>
call[name[self].extract_frames, parameter[name[bpf_buffer]]]
return[call[name[self].get_frame, parameter[]]] | keyword[def] identifier[recv] ( identifier[self] , identifier[x] = identifier[BPF_BUFFER_LENGTH] ):
literal[string]
keyword[if] identifier[self] . identifier[buffered_frames] ():
keyword[return] identifier[self] . identifier[get_frame] ()
keyword[try] :
identifier[bpf_buffer] = identifier[os] . identifier[read] ( identifier[self] . identifier[ins] , identifier[x] )
keyword[except] identifier[EnvironmentError] keyword[as] identifier[exc] :
keyword[if] identifier[exc] . identifier[errno] != identifier[errno] . identifier[EAGAIN] :
identifier[warning] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[return]
identifier[self] . identifier[extract_frames] ( identifier[bpf_buffer] )
keyword[return] identifier[self] . identifier[get_frame] () | def recv(self, x=BPF_BUFFER_LENGTH):
"""Receive a frame from the network"""
if self.buffered_frames():
# Get a frame from the buffer
return self.get_frame() # depends on [control=['if'], data=[]]
# Get data from BPF
try:
bpf_buffer = os.read(self.ins, x) # depends on [control=['try'], data=[]]
except EnvironmentError as exc:
if exc.errno != errno.EAGAIN:
warning('BPF recv()', exc_info=True) # depends on [control=['if'], data=[]]
return # depends on [control=['except'], data=['exc']]
# Extract all frames from the BPF buffer
self.extract_frames(bpf_buffer)
return self.get_frame() |
def firmware_download_input_protocol_type_sftp_protocol_sftp_password(self, **kwargs):
    """Build the firmware-download RPC payload carrying the SFTP password.
    Keyword Args:
        password (str): SFTP password placed at
            input/protocol-type/sftp-protocol/sftp/password.
        callback: optional callable applied to the built element tree;
            defaults to ``self._callback``.
    Returns: whatever the callback returns for the built ``firmware_download``
    element.
    """
    config = ET.Element("config")
    firmware_download = ET.Element("firmware_download")
    config = firmware_download
    # Build the nested input/protocol-type/sftp-protocol/sftp chain.
    node = ET.SubElement(firmware_download, "input")
    for tag in ("protocol-type", "sftp-protocol", "sftp"):
        node = ET.SubElement(node, tag)
    password = ET.SubElement(node, "password")
    password.text = kwargs.pop('password')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[firmware_download] assign[=] call[name[ET].Element, parameter[constant[firmware_download]]]
variable[config] assign[=] name[firmware_download]
variable[input] assign[=] call[name[ET].SubElement, parameter[name[firmware_download], constant[input]]]
variable[protocol_type] assign[=] call[name[ET].SubElement, parameter[name[input], constant[protocol-type]]]
variable[sftp_protocol] assign[=] call[name[ET].SubElement, parameter[name[protocol_type], constant[sftp-protocol]]]
variable[sftp] assign[=] call[name[ET].SubElement, parameter[name[sftp_protocol], constant[sftp]]]
variable[password] assign[=] call[name[ET].SubElement, parameter[name[sftp], constant[password]]]
name[password].text assign[=] call[name[kwargs].pop, parameter[constant[password]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[firmware_download_input_protocol_type_sftp_protocol_sftp_password] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[firmware_download] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[firmware_download]
identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[firmware_download] , literal[string] )
identifier[protocol_type] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] )
identifier[sftp_protocol] = identifier[ET] . identifier[SubElement] ( identifier[protocol_type] , literal[string] )
identifier[sftp] = identifier[ET] . identifier[SubElement] ( identifier[sftp_protocol] , literal[string] )
identifier[password] = identifier[ET] . identifier[SubElement] ( identifier[sftp] , literal[string] )
identifier[password] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def firmware_download_input_protocol_type_sftp_protocol_sftp_password(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
firmware_download = ET.Element('firmware_download')
config = firmware_download
input = ET.SubElement(firmware_download, 'input')
protocol_type = ET.SubElement(input, 'protocol-type')
sftp_protocol = ET.SubElement(protocol_type, 'sftp-protocol')
sftp = ET.SubElement(sftp_protocol, 'sftp')
password = ET.SubElement(sftp, 'password')
password.text = kwargs.pop('password')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def append_position(path, position, separator=''):
    """
    Concatenate a path and a position,
    between the filename and the extension.
    """
    # Split off the extension, then rebuild with the position (and an
    # optional separator) wedged in between.
    root, ext = os.path.splitext(path)
    return '{}{}{}{}'.format(root, separator, position, ext)
constant[
Concatenate a path and a position,
between the filename and the extension.
]
<ast.Tuple object at 0x7da1b1d35150> assign[=] call[name[os].path.splitext, parameter[name[path]]]
return[call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b1d36200>, <ast.Name object at 0x7da1b1d35cc0>, <ast.Call object at 0x7da1b1d35a50>, <ast.Name object at 0x7da1b1d36560>]]]]] | keyword[def] identifier[append_position] ( identifier[path] , identifier[position] , identifier[separator] = literal[string] ):
literal[string]
identifier[filename] , identifier[extension] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[path] )
keyword[return] literal[string] . identifier[join] ([ identifier[filename] , identifier[separator] , identifier[str] ( identifier[position] ), identifier[extension] ]) | def append_position(path, position, separator=''):
"""
Concatenate a path and a position,
between the filename and the extension.
"""
(filename, extension) = os.path.splitext(path)
return ''.join([filename, separator, str(position), extension]) |
def add(self, media, filepath, overwrite=True):
    """Set *media* from local file *filepath*. *overwrite* parameter specify
    if the media must be overwrite if it exists remotely.
    """
    # Read the whole attachment first, then ship it in one RPC call.
    with open(filepath, 'rb') as source:
        content = source.read()
    self._dokuwiki.send('wiki.putAttachment', media, Binary(content), ow=overwrite)
constant[Set *media* from local file *filepath*. *overwrite* parameter specify
if the media must be overwrite if it exists remotely.
]
with call[name[open], parameter[name[filepath], constant[rb]]] begin[:]
call[name[self]._dokuwiki.send, parameter[constant[wiki.putAttachment], name[media], call[name[Binary], parameter[call[name[fhandler].read, parameter[]]]]]] | keyword[def] identifier[add] ( identifier[self] , identifier[media] , identifier[filepath] , identifier[overwrite] = keyword[True] ):
literal[string]
keyword[with] identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[fhandler] :
identifier[self] . identifier[_dokuwiki] . identifier[send] ( literal[string] , identifier[media] ,
identifier[Binary] ( identifier[fhandler] . identifier[read] ()), identifier[ow] = identifier[overwrite] ) | def add(self, media, filepath, overwrite=True):
"""Set *media* from local file *filepath*. *overwrite* parameter specify
if the media must be overwrite if it exists remotely.
"""
with open(filepath, 'rb') as fhandler:
self._dokuwiki.send('wiki.putAttachment', media, Binary(fhandler.read()), ow=overwrite) # depends on [control=['with'], data=['fhandler']] |
def _plot(self):
"""Plot stacked serie lines and stacked secondary lines"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.line(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.line(serie, True) | def function[_plot, parameter[self]]:
constant[Plot stacked serie lines and stacked secondary lines]
for taget[name[serie]] in starred[call[name[self].series][<ast.Slice object at 0x7da18f811750>]] begin[:]
call[name[self].line, parameter[name[serie]]]
for taget[name[serie]] in starred[call[name[self].secondary_series][<ast.Slice object at 0x7da18f812f20>]] begin[:]
call[name[self].line, parameter[name[serie], constant[True]]] | keyword[def] identifier[_plot] ( identifier[self] ):
literal[string]
keyword[for] identifier[serie] keyword[in] identifier[self] . identifier[series] [::- literal[int] keyword[if] identifier[self] . identifier[stack_from_top] keyword[else] literal[int] ]:
identifier[self] . identifier[line] ( identifier[serie] )
keyword[for] identifier[serie] keyword[in] identifier[self] . identifier[secondary_series] [::- literal[int] keyword[if] identifier[self] . identifier[stack_from_top] keyword[else] literal[int] ]:
identifier[self] . identifier[line] ( identifier[serie] , keyword[True] ) | def _plot(self):
"""Plot stacked serie lines and stacked secondary lines"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.line(serie) # depends on [control=['for'], data=['serie']]
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.line(serie, True) # depends on [control=['for'], data=['serie']] |
def _construct_production_name(glyph_name, data=None):
    """Return the production name for a glyph name from the GlyphData.xml
    database according to the AGL specification.
    This should be run only if there is no official entry with a production
    name in it.
    Handles single glyphs (e.g. "brevecomb") and ligatures (e.g.
    "brevecomb_acutecomb"). Returns None when a valid and semantically
    meaningful production name can't be constructed or when the AGL
    specification would be violated, get_glyph() will use the bare glyph
    name then.
    Note:
    - Glyph name is the full name, e.g. "brevecomb_acutecomb.case".
    - Base name is the base part, e.g. "brevecomb_acutecomb"
    - Suffix is e.g. "case".
    Args:
        glyph_name (str): glyph name to construct a production name for.
        data: GlyphData-like object; its ``names`` mapping is consulted for
            ligature parts. NOTE(review): the ``None`` default would raise
            AttributeError in the ligature path -- callers appear to always
            pass real data; confirm.
    Returns:
        An AGL-compliant production name (str) or None.
    """
    # At this point, we have already checked the data for the full glyph name, so
    # directly go to the base name here (e.g. when looking at "fi.alt").
    base_name, dot, suffix = glyph_name.partition(".")
    glyphinfo = _lookup_attributes(base_name, data)
    if glyphinfo and glyphinfo.get("production"):
        # Found the base glyph.
        return glyphinfo["production"] + dot + suffix
    if glyph_name in fontTools.agl.AGL2UV or base_name in fontTools.agl.AGL2UV:
        # Glyph name is actually an AGLFN name.
        return glyph_name
    if "_" not in base_name:
        # Nothing found so far and the glyph name isn't a ligature ("_"
        # somewhere in it). The name does not carry any discernable Unicode
        # semantics, so just return something sanitized.
        return _agl_compliant_name(glyph_name)
    # So we have a ligature that is not mapped in the data. Split it up and
    # look up the individual parts.
    base_name_parts = base_name.split("_")
    # If all parts are in the AGLFN list, the glyph name is our production
    # name already.
    if all(part in fontTools.agl.AGL2UV for part in base_name_parts):
        return _agl_compliant_name(glyph_name)
    # Turn all parts of the ligature into production names.
    _character_outside_BMP = False
    production_names = []
    for part in base_name_parts:
        if part in fontTools.agl.AGL2UV:
            # A name present in the AGLFN is a production name already.
            production_names.append(part)
        else:
            part_entry = data.names.get(part) or {}
            part_production_name = part_entry.get("production")
            if part_production_name:
                production_names.append(part_production_name)
                # Take note if there are any characters outside the Unicode
                # BMP, e.g. "u10FFF" or "u10FFFF". Do not catch e.g. "u013B"
                # though.
                if len(part_production_name) > 5 and _is_unicode_u_value(
                    part_production_name
                ):
                    _character_outside_BMP = True
            else:
                # We hit a part that does not seem to be a valid glyph name known to us,
                # so the entire glyph name can't carry Unicode meaning. Return it
                # sanitized.
                return _agl_compliant_name(glyph_name)
    # Some names Glyphs uses resolve to other names that are not uniXXXX names and may
    # contain dots (e.g. idotaccent -> i.loclTRK). If there is any name with a "." in
    # it before the last element, punt. We'd have to introduce a "." into the ligature
    # midway, which is invalid according to the AGL. Example: "a_i.loclTRK" is valid,
    # but "a_i.loclTRK_a" isn't.
    if any("." in part for part in production_names[:-1]):
        return _agl_compliant_name(glyph_name)
    # If any production name starts with a "uni" and there are none of the
    # "uXXXXX" format, try to turn all parts into "uni" names and concatenate
    # them.
    if not _character_outside_BMP and any(
        part.startswith("uni") for part in production_names
    ):
        uni_names = []
        for part in production_names:
            if part.startswith("uni"):
                uni_names.append(part[3:])
            elif len(part) == 5 and _is_unicode_u_value(part):
                # "uXXXX" (exactly four hex digits) maps to "uniXXXX".
                uni_names.append(part[1:])
            elif part in fontTools.agl.AGL2UV:
                uni_names.append("{:04X}".format(fontTools.agl.AGL2UV[part]))
            else:
                return None
        final_production_name = "uni" + "".join(uni_names) + dot + suffix
    else:
        final_production_name = "_".join(production_names) + dot + suffix
    return _agl_compliant_name(final_production_name)
constant[Return the production name for a glyph name from the GlyphData.xml
database according to the AGL specification.
This should be run only if there is no official entry with a production
name in it.
Handles single glyphs (e.g. "brevecomb") and ligatures (e.g.
"brevecomb_acutecomb"). Returns None when a valid and semantically
meaningful production name can't be constructed or when the AGL
specification would be violated, get_glyph() will use the bare glyph
name then.
Note:
- Glyph name is the full name, e.g. "brevecomb_acutecomb.case".
- Base name is the base part, e.g. "brevecomb_acutecomb"
- Suffix is e.g. "case".
]
<ast.Tuple object at 0x7da1b03da920> assign[=] call[name[glyph_name].partition, parameter[constant[.]]]
variable[glyphinfo] assign[=] call[name[_lookup_attributes], parameter[name[base_name], name[data]]]
if <ast.BoolOp object at 0x7da1b03da8f0> begin[:]
return[binary_operation[binary_operation[call[name[glyphinfo]][constant[production]] + name[dot]] + name[suffix]]]
if <ast.BoolOp object at 0x7da1b03db190> begin[:]
return[name[glyph_name]]
if compare[constant[_] <ast.NotIn object at 0x7da2590d7190> name[base_name]] begin[:]
return[call[name[_agl_compliant_name], parameter[name[glyph_name]]]]
variable[base_name_parts] assign[=] call[name[base_name].split, parameter[constant[_]]]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b03db430>]] begin[:]
return[call[name[_agl_compliant_name], parameter[name[glyph_name]]]]
variable[_character_outside_BMP] assign[=] constant[False]
variable[production_names] assign[=] list[[]]
for taget[name[part]] in starred[name[base_name_parts]] begin[:]
if compare[name[part] in name[fontTools].agl.AGL2UV] begin[:]
call[name[production_names].append, parameter[name[part]]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b03e3f40>]] begin[:]
return[call[name[_agl_compliant_name], parameter[name[glyph_name]]]]
if <ast.BoolOp object at 0x7da1b03e3c40> begin[:]
variable[uni_names] assign[=] list[[]]
for taget[name[part]] in starred[name[production_names]] begin[:]
if call[name[part].startswith, parameter[constant[uni]]] begin[:]
call[name[uni_names].append, parameter[call[name[part]][<ast.Slice object at 0x7da1b03e07c0>]]]
variable[final_production_name] assign[=] binary_operation[binary_operation[binary_operation[constant[uni] + call[constant[].join, parameter[name[uni_names]]]] + name[dot]] + name[suffix]]
return[call[name[_agl_compliant_name], parameter[name[final_production_name]]]] | keyword[def] identifier[_construct_production_name] ( identifier[glyph_name] , identifier[data] = keyword[None] ):
literal[string]
identifier[base_name] , identifier[dot] , identifier[suffix] = identifier[glyph_name] . identifier[partition] ( literal[string] )
identifier[glyphinfo] = identifier[_lookup_attributes] ( identifier[base_name] , identifier[data] )
keyword[if] identifier[glyphinfo] keyword[and] identifier[glyphinfo] . identifier[get] ( literal[string] ):
keyword[return] identifier[glyphinfo] [ literal[string] ]+ identifier[dot] + identifier[suffix]
keyword[if] identifier[glyph_name] keyword[in] identifier[fontTools] . identifier[agl] . identifier[AGL2UV] keyword[or] identifier[base_name] keyword[in] identifier[fontTools] . identifier[agl] . identifier[AGL2UV] :
keyword[return] identifier[glyph_name]
keyword[if] literal[string] keyword[not] keyword[in] identifier[base_name] :
keyword[return] identifier[_agl_compliant_name] ( identifier[glyph_name] )
identifier[base_name_parts] = identifier[base_name] . identifier[split] ( literal[string] )
keyword[if] identifier[all] ( identifier[part] keyword[in] identifier[fontTools] . identifier[agl] . identifier[AGL2UV] keyword[for] identifier[part] keyword[in] identifier[base_name_parts] ):
keyword[return] identifier[_agl_compliant_name] ( identifier[glyph_name] )
identifier[_character_outside_BMP] = keyword[False]
identifier[production_names] =[]
keyword[for] identifier[part] keyword[in] identifier[base_name_parts] :
keyword[if] identifier[part] keyword[in] identifier[fontTools] . identifier[agl] . identifier[AGL2UV] :
identifier[production_names] . identifier[append] ( identifier[part] )
keyword[else] :
identifier[part_entry] = identifier[data] . identifier[names] . identifier[get] ( identifier[part] ) keyword[or] {}
identifier[part_production_name] = identifier[part_entry] . identifier[get] ( literal[string] )
keyword[if] identifier[part_production_name] :
identifier[production_names] . identifier[append] ( identifier[part_production_name] )
keyword[if] identifier[len] ( identifier[part_production_name] )> literal[int] keyword[and] identifier[_is_unicode_u_value] (
identifier[part_production_name]
):
identifier[_character_outside_BMP] = keyword[True]
keyword[else] :
keyword[return] identifier[_agl_compliant_name] ( identifier[glyph_name] )
keyword[if] identifier[any] ( literal[string] keyword[in] identifier[part] keyword[for] identifier[part] keyword[in] identifier[production_names] [:- literal[int] ]):
keyword[return] identifier[_agl_compliant_name] ( identifier[glyph_name] )
keyword[if] keyword[not] identifier[_character_outside_BMP] keyword[and] identifier[any] (
identifier[part] . identifier[startswith] ( literal[string] ) keyword[for] identifier[part] keyword[in] identifier[production_names]
):
identifier[uni_names] =[]
keyword[for] identifier[part] keyword[in] identifier[production_names] :
keyword[if] identifier[part] . identifier[startswith] ( literal[string] ):
identifier[uni_names] . identifier[append] ( identifier[part] [ literal[int] :])
keyword[elif] identifier[len] ( identifier[part] )== literal[int] keyword[and] identifier[_is_unicode_u_value] ( identifier[part] ):
identifier[uni_names] . identifier[append] ( identifier[part] [ literal[int] :])
keyword[elif] identifier[part] keyword[in] identifier[fontTools] . identifier[agl] . identifier[AGL2UV] :
identifier[uni_names] . identifier[append] ( literal[string] . identifier[format] ( identifier[fontTools] . identifier[agl] . identifier[AGL2UV] [ identifier[part] ]))
keyword[else] :
keyword[return] keyword[None]
identifier[final_production_name] = literal[string] + literal[string] . identifier[join] ( identifier[uni_names] )+ identifier[dot] + identifier[suffix]
keyword[else] :
identifier[final_production_name] = literal[string] . identifier[join] ( identifier[production_names] )+ identifier[dot] + identifier[suffix]
keyword[return] identifier[_agl_compliant_name] ( identifier[final_production_name] ) | def _construct_production_name(glyph_name, data=None):
"""Return the production name for a glyph name from the GlyphData.xml
database according to the AGL specification.
This should be run only if there is no official entry with a production
name in it.
Handles single glyphs (e.g. "brevecomb") and ligatures (e.g.
"brevecomb_acutecomb"). Returns None when a valid and semantically
meaningful production name can't be constructed or when the AGL
specification would be violated, get_glyph() will use the bare glyph
name then.
Note:
- Glyph name is the full name, e.g. "brevecomb_acutecomb.case".
- Base name is the base part, e.g. "brevecomb_acutecomb"
- Suffix is e.g. "case".
"""
# At this point, we have already checked the data for the full glyph name, so
# directly go to the base name here (e.g. when looking at "fi.alt").
(base_name, dot, suffix) = glyph_name.partition('.')
glyphinfo = _lookup_attributes(base_name, data)
if glyphinfo and glyphinfo.get('production'):
# Found the base glyph.
return glyphinfo['production'] + dot + suffix # depends on [control=['if'], data=[]]
if glyph_name in fontTools.agl.AGL2UV or base_name in fontTools.agl.AGL2UV:
# Glyph name is actually an AGLFN name.
return glyph_name # depends on [control=['if'], data=[]]
if '_' not in base_name:
# Nothing found so far and the glyph name isn't a ligature ("_"
# somewhere in it). The name does not carry any discernable Unicode
# semantics, so just return something sanitized.
return _agl_compliant_name(glyph_name) # depends on [control=['if'], data=[]]
# So we have a ligature that is not mapped in the data. Split it up and
# look up the individual parts.
base_name_parts = base_name.split('_')
# If all parts are in the AGLFN list, the glyph name is our production
# name already.
if all((part in fontTools.agl.AGL2UV for part in base_name_parts)):
return _agl_compliant_name(glyph_name) # depends on [control=['if'], data=[]]
# Turn all parts of the ligature into production names.
_character_outside_BMP = False
production_names = []
for part in base_name_parts:
if part in fontTools.agl.AGL2UV:
# A name present in the AGLFN is a production name already.
production_names.append(part) # depends on [control=['if'], data=['part']]
else:
part_entry = data.names.get(part) or {}
part_production_name = part_entry.get('production')
if part_production_name:
production_names.append(part_production_name)
# Take note if there are any characters outside the Unicode
# BMP, e.g. "u10FFF" or "u10FFFF". Do not catch e.g. "u013B"
# though.
if len(part_production_name) > 5 and _is_unicode_u_value(part_production_name):
_character_outside_BMP = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# We hit a part that does not seem to be a valid glyph name known to us,
# so the entire glyph name can't carry Unicode meaning. Return it
# sanitized.
return _agl_compliant_name(glyph_name) # depends on [control=['for'], data=['part']]
# Some names Glyphs uses resolve to other names that are not uniXXXX names and may
# contain dots (e.g. idotaccent -> i.loclTRK). If there is any name with a "." in
# it before the last element, punt. We'd have to introduce a "." into the ligature
# midway, which is invalid according to the AGL. Example: "a_i.loclTRK" is valid,
# but "a_i.loclTRK_a" isn't.
if any(('.' in part for part in production_names[:-1])):
return _agl_compliant_name(glyph_name) # depends on [control=['if'], data=[]]
# If any production name starts with a "uni" and there are none of the
# "uXXXXX" format, try to turn all parts into "uni" names and concatenate
# them.
if not _character_outside_BMP and any((part.startswith('uni') for part in production_names)):
uni_names = []
for part in production_names:
if part.startswith('uni'):
uni_names.append(part[3:]) # depends on [control=['if'], data=[]]
elif len(part) == 5 and _is_unicode_u_value(part):
uni_names.append(part[1:]) # depends on [control=['if'], data=[]]
elif part in fontTools.agl.AGL2UV:
uni_names.append('{:04X}'.format(fontTools.agl.AGL2UV[part])) # depends on [control=['if'], data=['part']]
else:
return None # depends on [control=['for'], data=['part']]
final_production_name = 'uni' + ''.join(uni_names) + dot + suffix # depends on [control=['if'], data=[]]
else:
final_production_name = '_'.join(production_names) + dot + suffix
return _agl_compliant_name(final_production_name) |
def deps_used(self, pkg, used):
"""Create dependencies dictionary
"""
if find_package(pkg + self.meta.sp, self.meta.pkg_path):
if pkg not in self.deps_dict.values():
self.deps_dict[pkg] = used
else:
self.deps_dict[pkg] += used | def function[deps_used, parameter[self, pkg, used]]:
constant[Create dependencies dictionary
]
if call[name[find_package], parameter[binary_operation[name[pkg] + name[self].meta.sp], name[self].meta.pkg_path]] begin[:]
if compare[name[pkg] <ast.NotIn object at 0x7da2590d7190> call[name[self].deps_dict.values, parameter[]]] begin[:]
call[name[self].deps_dict][name[pkg]] assign[=] name[used] | keyword[def] identifier[deps_used] ( identifier[self] , identifier[pkg] , identifier[used] ):
literal[string]
keyword[if] identifier[find_package] ( identifier[pkg] + identifier[self] . identifier[meta] . identifier[sp] , identifier[self] . identifier[meta] . identifier[pkg_path] ):
keyword[if] identifier[pkg] keyword[not] keyword[in] identifier[self] . identifier[deps_dict] . identifier[values] ():
identifier[self] . identifier[deps_dict] [ identifier[pkg] ]= identifier[used]
keyword[else] :
identifier[self] . identifier[deps_dict] [ identifier[pkg] ]+= identifier[used] | def deps_used(self, pkg, used):
"""Create dependencies dictionary
"""
if find_package(pkg + self.meta.sp, self.meta.pkg_path):
if pkg not in self.deps_dict.values():
self.deps_dict[pkg] = used # depends on [control=['if'], data=['pkg']]
else:
self.deps_dict[pkg] += used # depends on [control=['if'], data=[]] |
def logarithm(requestContext, seriesList, base=10):
"""
Takes one metric or a wildcard seriesList, a base, and draws the y-axis in
logarithmic format. If base is omitted, the function defaults to base 10.
Example::
&target=log(carbon.agents.hostname.avgUpdateTime,2)
"""
results = []
for series in seriesList:
newValues = []
for val in series:
if val is None:
newValues.append(None)
elif val <= 0:
newValues.append(None)
else:
newValues.append(math.log(val, base))
newName = "log(%s, %s)" % (series.name, base)
newSeries = TimeSeries(newName, series.start, series.end, series.step,
newValues)
newSeries.pathExpression = newName
results.append(newSeries)
return results | def function[logarithm, parameter[requestContext, seriesList, base]]:
constant[
Takes one metric or a wildcard seriesList, a base, and draws the y-axis in
logarithmic format. If base is omitted, the function defaults to base 10.
Example::
&target=log(carbon.agents.hostname.avgUpdateTime,2)
]
variable[results] assign[=] list[[]]
for taget[name[series]] in starred[name[seriesList]] begin[:]
variable[newValues] assign[=] list[[]]
for taget[name[val]] in starred[name[series]] begin[:]
if compare[name[val] is constant[None]] begin[:]
call[name[newValues].append, parameter[constant[None]]]
variable[newName] assign[=] binary_operation[constant[log(%s, %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b057a140>, <ast.Name object at 0x7da1b057b790>]]]
variable[newSeries] assign[=] call[name[TimeSeries], parameter[name[newName], name[series].start, name[series].end, name[series].step, name[newValues]]]
name[newSeries].pathExpression assign[=] name[newName]
call[name[results].append, parameter[name[newSeries]]]
return[name[results]] | keyword[def] identifier[logarithm] ( identifier[requestContext] , identifier[seriesList] , identifier[base] = literal[int] ):
literal[string]
identifier[results] =[]
keyword[for] identifier[series] keyword[in] identifier[seriesList] :
identifier[newValues] =[]
keyword[for] identifier[val] keyword[in] identifier[series] :
keyword[if] identifier[val] keyword[is] keyword[None] :
identifier[newValues] . identifier[append] ( keyword[None] )
keyword[elif] identifier[val] <= literal[int] :
identifier[newValues] . identifier[append] ( keyword[None] )
keyword[else] :
identifier[newValues] . identifier[append] ( identifier[math] . identifier[log] ( identifier[val] , identifier[base] ))
identifier[newName] = literal[string] %( identifier[series] . identifier[name] , identifier[base] )
identifier[newSeries] = identifier[TimeSeries] ( identifier[newName] , identifier[series] . identifier[start] , identifier[series] . identifier[end] , identifier[series] . identifier[step] ,
identifier[newValues] )
identifier[newSeries] . identifier[pathExpression] = identifier[newName]
identifier[results] . identifier[append] ( identifier[newSeries] )
keyword[return] identifier[results] | def logarithm(requestContext, seriesList, base=10):
"""
Takes one metric or a wildcard seriesList, a base, and draws the y-axis in
logarithmic format. If base is omitted, the function defaults to base 10.
Example::
&target=log(carbon.agents.hostname.avgUpdateTime,2)
"""
results = []
for series in seriesList:
newValues = []
for val in series:
if val is None:
newValues.append(None) # depends on [control=['if'], data=[]]
elif val <= 0:
newValues.append(None) # depends on [control=['if'], data=[]]
else:
newValues.append(math.log(val, base)) # depends on [control=['for'], data=['val']]
newName = 'log(%s, %s)' % (series.name, base)
newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues)
newSeries.pathExpression = newName
results.append(newSeries) # depends on [control=['for'], data=['series']]
return results |
def _versioned_lib_suffix(env, suffix, version):
"""Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'"""
Verbose = False
if Verbose:
print("_versioned_lib_suffix: suffix= ", suffix)
print("_versioned_lib_suffix: version= ", version)
cygversion = re.sub('\.', '-', version)
if not suffix.startswith('-' + cygversion):
suffix = '-' + cygversion + suffix
if Verbose:
print("_versioned_lib_suffix: return suffix= ", suffix)
return suffix | def function[_versioned_lib_suffix, parameter[env, suffix, version]]:
constant[Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll']
variable[Verbose] assign[=] constant[False]
if name[Verbose] begin[:]
call[name[print], parameter[constant[_versioned_lib_suffix: suffix= ], name[suffix]]]
call[name[print], parameter[constant[_versioned_lib_suffix: version= ], name[version]]]
variable[cygversion] assign[=] call[name[re].sub, parameter[constant[\.], constant[-], name[version]]]
if <ast.UnaryOp object at 0x7da18fe90e80> begin[:]
variable[suffix] assign[=] binary_operation[binary_operation[constant[-] + name[cygversion]] + name[suffix]]
if name[Verbose] begin[:]
call[name[print], parameter[constant[_versioned_lib_suffix: return suffix= ], name[suffix]]]
return[name[suffix]] | keyword[def] identifier[_versioned_lib_suffix] ( identifier[env] , identifier[suffix] , identifier[version] ):
literal[string]
identifier[Verbose] = keyword[False]
keyword[if] identifier[Verbose] :
identifier[print] ( literal[string] , identifier[suffix] )
identifier[print] ( literal[string] , identifier[version] )
identifier[cygversion] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[version] )
keyword[if] keyword[not] identifier[suffix] . identifier[startswith] ( literal[string] + identifier[cygversion] ):
identifier[suffix] = literal[string] + identifier[cygversion] + identifier[suffix]
keyword[if] identifier[Verbose] :
identifier[print] ( literal[string] , identifier[suffix] )
keyword[return] identifier[suffix] | def _versioned_lib_suffix(env, suffix, version):
"""Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'"""
Verbose = False
if Verbose:
print('_versioned_lib_suffix: suffix= ', suffix)
print('_versioned_lib_suffix: version= ', version) # depends on [control=['if'], data=[]]
cygversion = re.sub('\\.', '-', version)
if not suffix.startswith('-' + cygversion):
suffix = '-' + cygversion + suffix # depends on [control=['if'], data=[]]
if Verbose:
print('_versioned_lib_suffix: return suffix= ', suffix) # depends on [control=['if'], data=[]]
return suffix |
def normalize_strategy_parameters(params):
"""Normalize strategy parameters to be a list of strings.
Parameters
----------
params : (space-delimited) string or sequence of strings/numbers Parameters
expected by :class:`SampleStrategy` object, in various forms, where the first
parameter is the name of the strategy.
Returns
-------
params : tuple of strings
Strategy parameters as a list of strings
"""
def fixup_numbers(val):
try:
# See if it is a number
return str(float(val))
except ValueError:
# ok, it is not a number we know of, perhaps a string
return str(val)
if isinstance(params, basestring):
params = params.split(' ')
# No number
return tuple(fixup_numbers(p) for p in params) | def function[normalize_strategy_parameters, parameter[params]]:
constant[Normalize strategy parameters to be a list of strings.
Parameters
----------
params : (space-delimited) string or sequence of strings/numbers Parameters
expected by :class:`SampleStrategy` object, in various forms, where the first
parameter is the name of the strategy.
Returns
-------
params : tuple of strings
Strategy parameters as a list of strings
]
def function[fixup_numbers, parameter[val]]:
<ast.Try object at 0x7da1b05d8b50>
if call[name[isinstance], parameter[name[params], name[basestring]]] begin[:]
variable[params] assign[=] call[name[params].split, parameter[constant[ ]]]
return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b05db400>]]] | keyword[def] identifier[normalize_strategy_parameters] ( identifier[params] ):
literal[string]
keyword[def] identifier[fixup_numbers] ( identifier[val] ):
keyword[try] :
keyword[return] identifier[str] ( identifier[float] ( identifier[val] ))
keyword[except] identifier[ValueError] :
keyword[return] identifier[str] ( identifier[val] )
keyword[if] identifier[isinstance] ( identifier[params] , identifier[basestring] ):
identifier[params] = identifier[params] . identifier[split] ( literal[string] )
keyword[return] identifier[tuple] ( identifier[fixup_numbers] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[params] ) | def normalize_strategy_parameters(params):
"""Normalize strategy parameters to be a list of strings.
Parameters
----------
params : (space-delimited) string or sequence of strings/numbers Parameters
expected by :class:`SampleStrategy` object, in various forms, where the first
parameter is the name of the strategy.
Returns
-------
params : tuple of strings
Strategy parameters as a list of strings
"""
def fixup_numbers(val):
try:
# See if it is a number
return str(float(val)) # depends on [control=['try'], data=[]]
except ValueError:
# ok, it is not a number we know of, perhaps a string
return str(val) # depends on [control=['except'], data=[]]
if isinstance(params, basestring):
params = params.split(' ') # depends on [control=['if'], data=[]]
# No number
return tuple((fixup_numbers(p) for p in params)) |
def login_oauth2_user(valid, oauth):
"""Log in a user after having been verified."""
if valid:
oauth.user.login_via_oauth2 = True
_request_ctx_stack.top.user = oauth.user
identity_changed.send(current_app._get_current_object(),
identity=Identity(oauth.user.id))
return valid, oauth | def function[login_oauth2_user, parameter[valid, oauth]]:
constant[Log in a user after having been verified.]
if name[valid] begin[:]
name[oauth].user.login_via_oauth2 assign[=] constant[True]
name[_request_ctx_stack].top.user assign[=] name[oauth].user
call[name[identity_changed].send, parameter[call[name[current_app]._get_current_object, parameter[]]]]
return[tuple[[<ast.Name object at 0x7da1b253f6a0>, <ast.Name object at 0x7da1b253df30>]]] | keyword[def] identifier[login_oauth2_user] ( identifier[valid] , identifier[oauth] ):
literal[string]
keyword[if] identifier[valid] :
identifier[oauth] . identifier[user] . identifier[login_via_oauth2] = keyword[True]
identifier[_request_ctx_stack] . identifier[top] . identifier[user] = identifier[oauth] . identifier[user]
identifier[identity_changed] . identifier[send] ( identifier[current_app] . identifier[_get_current_object] (),
identifier[identity] = identifier[Identity] ( identifier[oauth] . identifier[user] . identifier[id] ))
keyword[return] identifier[valid] , identifier[oauth] | def login_oauth2_user(valid, oauth):
"""Log in a user after having been verified."""
if valid:
oauth.user.login_via_oauth2 = True
_request_ctx_stack.top.user = oauth.user
identity_changed.send(current_app._get_current_object(), identity=Identity(oauth.user.id)) # depends on [control=['if'], data=[]]
return (valid, oauth) |
def _set_association(self, v, load=False):
"""
Setter method for association, mapped from YANG variable /interface/port_channel/switchport/private_vlan/association (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_association is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_association() directly.
YANG Description: Association
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=association.association, is_container='container', presence=False, yang_name="association", rest_name="association", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'trunk-association', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_PVLAN_ASSOCIATION'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """association must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=association.association, is_container='container', presence=False, yang_name="association", rest_name="association", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'trunk-association', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_PVLAN_ASSOCIATION'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True)""",
})
self.__association = t
if hasattr(self, '_set'):
self._set() | def function[_set_association, parameter[self, v, load]]:
constant[
Setter method for association, mapped from YANG variable /interface/port_channel/switchport/private_vlan/association (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_association is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_association() directly.
YANG Description: Association
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da18fe92170>
name[self].__association assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_association] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[association] . identifier[association] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__association] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_association(self, v, load=False):
"""
Setter method for association, mapped from YANG variable /interface/port_channel/switchport/private_vlan/association (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_association is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_association() directly.
YANG Description: Association
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=association.association, is_container='container', presence=False, yang_name='association', rest_name='association', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'trunk-association', u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_PVLAN_ASSOCIATION'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='container', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'association must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=association.association, is_container=\'container\', presence=False, yang_name="association", rest_name="association", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'trunk-association\', u\'sort-priority\': u\'RUNNCFG_INTERFACE_LEVEL_PVLAN_ASSOCIATION\'}}, namespace=\'urn:brocade.com:mgmt:brocade-interface\', defining_module=\'brocade-interface\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__association = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def add_x10_device(self, housecode, unitcode, dev_type):
"""Add an X10 device to the PLM."""
device = None
try:
device = self.plm.devices.add_x10_device(self.plm, housecode,
unitcode, dev_type)
except ValueError:
pass
return device | def function[add_x10_device, parameter[self, housecode, unitcode, dev_type]]:
constant[Add an X10 device to the PLM.]
variable[device] assign[=] constant[None]
<ast.Try object at 0x7da1b1a44d90>
return[name[device]] | keyword[def] identifier[add_x10_device] ( identifier[self] , identifier[housecode] , identifier[unitcode] , identifier[dev_type] ):
literal[string]
identifier[device] = keyword[None]
keyword[try] :
identifier[device] = identifier[self] . identifier[plm] . identifier[devices] . identifier[add_x10_device] ( identifier[self] . identifier[plm] , identifier[housecode] ,
identifier[unitcode] , identifier[dev_type] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] identifier[device] | def add_x10_device(self, housecode, unitcode, dev_type):
"""Add an X10 device to the PLM."""
device = None
try:
device = self.plm.devices.add_x10_device(self.plm, housecode, unitcode, dev_type) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
return device |
def units(cls, scale=1):
'''
:scale: optional integer scaling factor
:return: list of three Point subclass
Returns three points whose coordinates are the head of a
unit vector from the origin ( conventionally i, j and k).
'''
return [cls(x=scale), cls(y=scale), cls(z=scale)] | def function[units, parameter[cls, scale]]:
constant[
:scale: optional integer scaling factor
:return: list of three Point subclass
Returns three points whose coordinates are the head of a
unit vector from the origin ( conventionally i, j and k).
]
return[list[[<ast.Call object at 0x7da1b11d97b0>, <ast.Call object at 0x7da1b11db130>, <ast.Call object at 0x7da1b11db520>]]] | keyword[def] identifier[units] ( identifier[cls] , identifier[scale] = literal[int] ):
literal[string]
keyword[return] [ identifier[cls] ( identifier[x] = identifier[scale] ), identifier[cls] ( identifier[y] = identifier[scale] ), identifier[cls] ( identifier[z] = identifier[scale] )] | def units(cls, scale=1):
"""
:scale: optional integer scaling factor
:return: list of three Point subclass
Returns three points whose coordinates are the head of a
unit vector from the origin ( conventionally i, j and k).
"""
return [cls(x=scale), cls(y=scale), cls(z=scale)] |
def connected(self, *, presence=structs.PresenceState(False), **kwargs):
"""
Return a :class:`.node.UseConnected` context manager which does not
modify the presence settings.
The keyword arguments are passed to the :class:`.node.UseConnected`
context manager constructor.
.. versionadded:: 0.8
"""
return UseConnected(self, presence=presence, **kwargs) | def function[connected, parameter[self]]:
constant[
Return a :class:`.node.UseConnected` context manager which does not
modify the presence settings.
The keyword arguments are passed to the :class:`.node.UseConnected`
context manager constructor.
.. versionadded:: 0.8
]
return[call[name[UseConnected], parameter[name[self]]]] | keyword[def] identifier[connected] ( identifier[self] ,*, identifier[presence] = identifier[structs] . identifier[PresenceState] ( keyword[False] ),** identifier[kwargs] ):
literal[string]
keyword[return] identifier[UseConnected] ( identifier[self] , identifier[presence] = identifier[presence] ,** identifier[kwargs] ) | def connected(self, *, presence=structs.PresenceState(False), **kwargs):
"""
Return a :class:`.node.UseConnected` context manager which does not
modify the presence settings.
The keyword arguments are passed to the :class:`.node.UseConnected`
context manager constructor.
.. versionadded:: 0.8
"""
return UseConnected(self, presence=presence, **kwargs) |
def rate_overall(self):
"""Returns the overall average rate based on the start time."""
elapsed = self.elapsed
return self.rate if not elapsed else self.numerator / self.elapsed | def function[rate_overall, parameter[self]]:
constant[Returns the overall average rate based on the start time.]
variable[elapsed] assign[=] name[self].elapsed
return[<ast.IfExp object at 0x7da18f09e2f0>] | keyword[def] identifier[rate_overall] ( identifier[self] ):
literal[string]
identifier[elapsed] = identifier[self] . identifier[elapsed]
keyword[return] identifier[self] . identifier[rate] keyword[if] keyword[not] identifier[elapsed] keyword[else] identifier[self] . identifier[numerator] / identifier[self] . identifier[elapsed] | def rate_overall(self):
"""Returns the overall average rate based on the start time."""
elapsed = self.elapsed
return self.rate if not elapsed else self.numerator / self.elapsed |
def _transition(self, duration, brightness):
""" Complete a transition.
:param duration: Duration of transition.
:param brightness: Transition to this brightness.
"""
# Set initial value.
b_start = self.brightness
# Compute ideal step amount.
b_steps = 0
if brightness is not None:
b_steps = steps(self.brightness, brightness,
self.command_set.brightness_steps)
# Compute ideal step amount (at least one).
# Calculate wait.
wait = self._wait(duration, b_steps, b_steps)
# Scale down steps if no wait time.
if wait == 0:
b_steps = self._scale_steps(duration, b_steps,
b_steps)
# Perform transition.
for i in range(b_steps):
# Brightness.
if b_steps > 0:
self.brightness = util.transition(i, b_steps,
b_start, brightness)
time.sleep(wait) | def function[_transition, parameter[self, duration, brightness]]:
constant[ Complete a transition.
:param duration: Duration of transition.
:param brightness: Transition to this brightness.
]
variable[b_start] assign[=] name[self].brightness
variable[b_steps] assign[=] constant[0]
if compare[name[brightness] is_not constant[None]] begin[:]
variable[b_steps] assign[=] call[name[steps], parameter[name[self].brightness, name[brightness], name[self].command_set.brightness_steps]]
variable[wait] assign[=] call[name[self]._wait, parameter[name[duration], name[b_steps], name[b_steps]]]
if compare[name[wait] equal[==] constant[0]] begin[:]
variable[b_steps] assign[=] call[name[self]._scale_steps, parameter[name[duration], name[b_steps], name[b_steps]]]
for taget[name[i]] in starred[call[name[range], parameter[name[b_steps]]]] begin[:]
if compare[name[b_steps] greater[>] constant[0]] begin[:]
name[self].brightness assign[=] call[name[util].transition, parameter[name[i], name[b_steps], name[b_start], name[brightness]]]
call[name[time].sleep, parameter[name[wait]]] | keyword[def] identifier[_transition] ( identifier[self] , identifier[duration] , identifier[brightness] ):
literal[string]
identifier[b_start] = identifier[self] . identifier[brightness]
identifier[b_steps] = literal[int]
keyword[if] identifier[brightness] keyword[is] keyword[not] keyword[None] :
identifier[b_steps] = identifier[steps] ( identifier[self] . identifier[brightness] , identifier[brightness] ,
identifier[self] . identifier[command_set] . identifier[brightness_steps] )
identifier[wait] = identifier[self] . identifier[_wait] ( identifier[duration] , identifier[b_steps] , identifier[b_steps] )
keyword[if] identifier[wait] == literal[int] :
identifier[b_steps] = identifier[self] . identifier[_scale_steps] ( identifier[duration] , identifier[b_steps] ,
identifier[b_steps] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[b_steps] ):
keyword[if] identifier[b_steps] > literal[int] :
identifier[self] . identifier[brightness] = identifier[util] . identifier[transition] ( identifier[i] , identifier[b_steps] ,
identifier[b_start] , identifier[brightness] )
identifier[time] . identifier[sleep] ( identifier[wait] ) | def _transition(self, duration, brightness):
""" Complete a transition.
:param duration: Duration of transition.
:param brightness: Transition to this brightness.
"""
# Set initial value.
b_start = self.brightness
# Compute ideal step amount.
b_steps = 0
if brightness is not None:
b_steps = steps(self.brightness, brightness, self.command_set.brightness_steps) # depends on [control=['if'], data=['brightness']]
# Compute ideal step amount (at least one).
# Calculate wait.
wait = self._wait(duration, b_steps, b_steps)
# Scale down steps if no wait time.
if wait == 0:
b_steps = self._scale_steps(duration, b_steps, b_steps) # depends on [control=['if'], data=[]]
# Perform transition.
for i in range(b_steps):
# Brightness.
if b_steps > 0:
self.brightness = util.transition(i, b_steps, b_start, brightness) # depends on [control=['if'], data=['b_steps']]
time.sleep(wait) # depends on [control=['for'], data=['i']] |
def _dspace(
irez,
d2201, d2211, d3210, d3222, d4410,
d4422, d5220, d5232, d5421, d5433,
dedt, del1, del2, del3, didt,
dmdt, dnodt, domdt, argpo, argpdot,
t, tc, gsto, xfact, xlamo,
no,
atime, em, argpm, inclm, xli,
mm, xni, nodem, nm,
):
fasx2 = 0.13130908;
fasx4 = 2.8843198;
fasx6 = 0.37448087;
g22 = 5.7686396;
g32 = 0.95240898;
g44 = 1.8014998;
g52 = 1.0508330;
g54 = 4.4108898;
rptim = 4.37526908801129966e-3; # equates to 7.29211514668855e-5 rad/sec
stepp = 720.0;
stepn = -720.0;
step2 = 259200.0;
# ----------- calculate deep space resonance effects -----------
dndt = 0.0;
theta = (gsto + tc * rptim) % twopi
em = em + dedt * t;
inclm = inclm + didt * t;
argpm = argpm + domdt * t;
nodem = nodem + dnodt * t;
mm = mm + dmdt * t;
"""
// sgp4fix for negative inclinations
// the following if statement should be commented out
// if (inclm < 0.0)
// {
// inclm = -inclm;
// argpm = argpm - pi;
// nodem = nodem + pi;
// }
/* - update resonances : numerical (euler-maclaurin) integration - */
/* ------------------------- epoch restart ---------------------- */
// sgp4fix for propagator problems
// the following integration works for negative time steps and periods
// the specific changes are unknown because the original code was so convoluted
// sgp4fix take out atime = 0.0 and fix for faster operation
"""
ft = 0.0;
if irez != 0:
# sgp4fix streamline check
if atime == 0.0 or t * atime <= 0.0 or fabs(t) < fabs(atime):
atime = 0.0;
xni = no;
xli = xlamo;
# sgp4fix move check outside loop
if t > 0.0:
delt = stepp;
else:
delt = stepn;
iretn = 381; # added for do loop
iret = 0; # added for loop
while iretn == 381:
# ------------------- dot terms calculated -------------
# ----------- near - synchronous resonance terms -------
if irez != 2:
xndt = del1 * sin(xli - fasx2) + del2 * sin(2.0 * (xli - fasx4)) + \
del3 * sin(3.0 * (xli - fasx6));
xldot = xni + xfact;
xnddt = del1 * cos(xli - fasx2) + \
2.0 * del2 * cos(2.0 * (xli - fasx4)) + \
3.0 * del3 * cos(3.0 * (xli - fasx6));
xnddt = xnddt * xldot;
else:
# --------- near - half-day resonance terms --------
xomi = argpo + argpdot * atime;
x2omi = xomi + xomi;
x2li = xli + xli;
xndt = (d2201 * sin(x2omi + xli - g22) + d2211 * sin(xli - g22) +
d3210 * sin(xomi + xli - g32) + d3222 * sin(-xomi + xli - g32)+
d4410 * sin(x2omi + x2li - g44)+ d4422 * sin(x2li - g44) +
d5220 * sin(xomi + xli - g52) + d5232 * sin(-xomi + xli - g52)+
d5421 * sin(xomi + x2li - g54) + d5433 * sin(-xomi + x2li - g54));
xldot = xni + xfact;
xnddt = (d2201 * cos(x2omi + xli - g22) + d2211 * cos(xli - g22) +
d3210 * cos(xomi + xli - g32) + d3222 * cos(-xomi + xli - g32) +
d5220 * cos(xomi + xli - g52) + d5232 * cos(-xomi + xli - g52) +
2.0 * (d4410 * cos(x2omi + x2li - g44) +
d4422 * cos(x2li - g44) + d5421 * cos(xomi + x2li - g54) +
d5433 * cos(-xomi + x2li - g54)));
xnddt = xnddt * xldot;
# ----------------------- integrator -------------------
# sgp4fix move end checks to end of routine
if fabs(t - atime) >= stepp:
iret = 0;
iretn = 381;
else:
ft = t - atime;
iretn = 0;
if iretn == 381:
xli = xli + xldot * delt + xndt * step2;
xni = xni + xndt * delt + xnddt * step2;
atime = atime + delt;
nm = xni + xndt * ft + xnddt * ft * ft * 0.5;
xl = xli + xldot * ft + xndt * ft * ft * 0.5;
if irez != 1:
mm = xl - 2.0 * nodem + 2.0 * theta;
dndt = nm - no;
else:
mm = xl - nodem - argpm + theta;
dndt = nm - no;
nm = no + dndt;
return (
atime, em, argpm, inclm, xli,
mm, xni, nodem, dndt, nm,
) | def function[_dspace, parameter[irez, d2201, d2211, d3210, d3222, d4410, d4422, d5220, d5232, d5421, d5433, dedt, del1, del2, del3, didt, dmdt, dnodt, domdt, argpo, argpdot, t, tc, gsto, xfact, xlamo, no, atime, em, argpm, inclm, xli, mm, xni, nodem, nm]]:
variable[fasx2] assign[=] constant[0.13130908]
variable[fasx4] assign[=] constant[2.8843198]
variable[fasx6] assign[=] constant[0.37448087]
variable[g22] assign[=] constant[5.7686396]
variable[g32] assign[=] constant[0.95240898]
variable[g44] assign[=] constant[1.8014998]
variable[g52] assign[=] constant[1.050833]
variable[g54] assign[=] constant[4.4108898]
variable[rptim] assign[=] constant[0.0043752690880113]
variable[stepp] assign[=] constant[720.0]
variable[stepn] assign[=] <ast.UnaryOp object at 0x7da1b0cb6890>
variable[step2] assign[=] constant[259200.0]
variable[dndt] assign[=] constant[0.0]
variable[theta] assign[=] binary_operation[binary_operation[name[gsto] + binary_operation[name[tc] * name[rptim]]] <ast.Mod object at 0x7da2590d6920> name[twopi]]
variable[em] assign[=] binary_operation[name[em] + binary_operation[name[dedt] * name[t]]]
variable[inclm] assign[=] binary_operation[name[inclm] + binary_operation[name[didt] * name[t]]]
variable[argpm] assign[=] binary_operation[name[argpm] + binary_operation[name[domdt] * name[t]]]
variable[nodem] assign[=] binary_operation[name[nodem] + binary_operation[name[dnodt] * name[t]]]
variable[mm] assign[=] binary_operation[name[mm] + binary_operation[name[dmdt] * name[t]]]
constant[
// sgp4fix for negative inclinations
// the following if statement should be commented out
// if (inclm < 0.0)
// {
// inclm = -inclm;
// argpm = argpm - pi;
// nodem = nodem + pi;
// }
/* - update resonances : numerical (euler-maclaurin) integration - */
/* ------------------------- epoch restart ---------------------- */
// sgp4fix for propagator problems
// the following integration works for negative time steps and periods
// the specific changes are unknown because the original code was so convoluted
// sgp4fix take out atime = 0.0 and fix for faster operation
]
variable[ft] assign[=] constant[0.0]
if compare[name[irez] not_equal[!=] constant[0]] begin[:]
if <ast.BoolOp object at 0x7da1b0ba6860> begin[:]
variable[atime] assign[=] constant[0.0]
variable[xni] assign[=] name[no]
variable[xli] assign[=] name[xlamo]
if compare[name[t] greater[>] constant[0.0]] begin[:]
variable[delt] assign[=] name[stepp]
variable[iretn] assign[=] constant[381]
variable[iret] assign[=] constant[0]
while compare[name[iretn] equal[==] constant[381]] begin[:]
if compare[name[irez] not_equal[!=] constant[2]] begin[:]
variable[xndt] assign[=] binary_operation[binary_operation[binary_operation[name[del1] * call[name[sin], parameter[binary_operation[name[xli] - name[fasx2]]]]] + binary_operation[name[del2] * call[name[sin], parameter[binary_operation[constant[2.0] * binary_operation[name[xli] - name[fasx4]]]]]]] + binary_operation[name[del3] * call[name[sin], parameter[binary_operation[constant[3.0] * binary_operation[name[xli] - name[fasx6]]]]]]]
variable[xldot] assign[=] binary_operation[name[xni] + name[xfact]]
variable[xnddt] assign[=] binary_operation[binary_operation[binary_operation[name[del1] * call[name[cos], parameter[binary_operation[name[xli] - name[fasx2]]]]] + binary_operation[binary_operation[constant[2.0] * name[del2]] * call[name[cos], parameter[binary_operation[constant[2.0] * binary_operation[name[xli] - name[fasx4]]]]]]] + binary_operation[binary_operation[constant[3.0] * name[del3]] * call[name[cos], parameter[binary_operation[constant[3.0] * binary_operation[name[xli] - name[fasx6]]]]]]]
variable[xnddt] assign[=] binary_operation[name[xnddt] * name[xldot]]
if compare[call[name[fabs], parameter[binary_operation[name[t] - name[atime]]]] greater_or_equal[>=] name[stepp]] begin[:]
variable[iret] assign[=] constant[0]
variable[iretn] assign[=] constant[381]
if compare[name[iretn] equal[==] constant[381]] begin[:]
variable[xli] assign[=] binary_operation[binary_operation[name[xli] + binary_operation[name[xldot] * name[delt]]] + binary_operation[name[xndt] * name[step2]]]
variable[xni] assign[=] binary_operation[binary_operation[name[xni] + binary_operation[name[xndt] * name[delt]]] + binary_operation[name[xnddt] * name[step2]]]
variable[atime] assign[=] binary_operation[name[atime] + name[delt]]
variable[nm] assign[=] binary_operation[binary_operation[name[xni] + binary_operation[name[xndt] * name[ft]]] + binary_operation[binary_operation[binary_operation[name[xnddt] * name[ft]] * name[ft]] * constant[0.5]]]
variable[xl] assign[=] binary_operation[binary_operation[name[xli] + binary_operation[name[xldot] * name[ft]]] + binary_operation[binary_operation[binary_operation[name[xndt] * name[ft]] * name[ft]] * constant[0.5]]]
if compare[name[irez] not_equal[!=] constant[1]] begin[:]
variable[mm] assign[=] binary_operation[binary_operation[name[xl] - binary_operation[constant[2.0] * name[nodem]]] + binary_operation[constant[2.0] * name[theta]]]
variable[dndt] assign[=] binary_operation[name[nm] - name[no]]
variable[nm] assign[=] binary_operation[name[no] + name[dndt]]
return[tuple[[<ast.Name object at 0x7da1b0b9e290>, <ast.Name object at 0x7da1b0b9ff40>, <ast.Name object at 0x7da1b0b9e260>, <ast.Name object at 0x7da1b0b9c1c0>, <ast.Name object at 0x7da1b0b9f3d0>, <ast.Name object at 0x7da1b0b9d600>, <ast.Name object at 0x7da1b0b9fd30>, <ast.Name object at 0x7da1b0b9f6d0>, <ast.Name object at 0x7da1b0b9d0c0>, <ast.Name object at 0x7da1b0b9d660>]]] | keyword[def] identifier[_dspace] (
identifier[irez] ,
identifier[d2201] , identifier[d2211] , identifier[d3210] , identifier[d3222] , identifier[d4410] ,
identifier[d4422] , identifier[d5220] , identifier[d5232] , identifier[d5421] , identifier[d5433] ,
identifier[dedt] , identifier[del1] , identifier[del2] , identifier[del3] , identifier[didt] ,
identifier[dmdt] , identifier[dnodt] , identifier[domdt] , identifier[argpo] , identifier[argpdot] ,
identifier[t] , identifier[tc] , identifier[gsto] , identifier[xfact] , identifier[xlamo] ,
identifier[no] ,
identifier[atime] , identifier[em] , identifier[argpm] , identifier[inclm] , identifier[xli] ,
identifier[mm] , identifier[xni] , identifier[nodem] , identifier[nm] ,
):
identifier[fasx2] = literal[int] ;
identifier[fasx4] = literal[int] ;
identifier[fasx6] = literal[int] ;
identifier[g22] = literal[int] ;
identifier[g32] = literal[int] ;
identifier[g44] = literal[int] ;
identifier[g52] = literal[int] ;
identifier[g54] = literal[int] ;
identifier[rptim] = literal[int] ;
identifier[stepp] = literal[int] ;
identifier[stepn] =- literal[int] ;
identifier[step2] = literal[int] ;
identifier[dndt] = literal[int] ;
identifier[theta] =( identifier[gsto] + identifier[tc] * identifier[rptim] )% identifier[twopi]
identifier[em] = identifier[em] + identifier[dedt] * identifier[t] ;
identifier[inclm] = identifier[inclm] + identifier[didt] * identifier[t] ;
identifier[argpm] = identifier[argpm] + identifier[domdt] * identifier[t] ;
identifier[nodem] = identifier[nodem] + identifier[dnodt] * identifier[t] ;
identifier[mm] = identifier[mm] + identifier[dmdt] * identifier[t] ;
literal[string]
identifier[ft] = literal[int] ;
keyword[if] identifier[irez] != literal[int] :
keyword[if] identifier[atime] == literal[int] keyword[or] identifier[t] * identifier[atime] <= literal[int] keyword[or] identifier[fabs] ( identifier[t] )< identifier[fabs] ( identifier[atime] ):
identifier[atime] = literal[int] ;
identifier[xni] = identifier[no] ;
identifier[xli] = identifier[xlamo] ;
keyword[if] identifier[t] > literal[int] :
identifier[delt] = identifier[stepp] ;
keyword[else] :
identifier[delt] = identifier[stepn] ;
identifier[iretn] = literal[int] ;
identifier[iret] = literal[int] ;
keyword[while] identifier[iretn] == literal[int] :
keyword[if] identifier[irez] != literal[int] :
identifier[xndt] = identifier[del1] * identifier[sin] ( identifier[xli] - identifier[fasx2] )+ identifier[del2] * identifier[sin] ( literal[int] *( identifier[xli] - identifier[fasx4] ))+ identifier[del3] * identifier[sin] ( literal[int] *( identifier[xli] - identifier[fasx6] ));
identifier[xldot] = identifier[xni] + identifier[xfact] ;
identifier[xnddt] = identifier[del1] * identifier[cos] ( identifier[xli] - identifier[fasx2] )+ literal[int] * identifier[del2] * identifier[cos] ( literal[int] *( identifier[xli] - identifier[fasx4] ))+ literal[int] * identifier[del3] * identifier[cos] ( literal[int] *( identifier[xli] - identifier[fasx6] ));
identifier[xnddt] = identifier[xnddt] * identifier[xldot] ;
keyword[else] :
identifier[xomi] = identifier[argpo] + identifier[argpdot] * identifier[atime] ;
identifier[x2omi] = identifier[xomi] + identifier[xomi] ;
identifier[x2li] = identifier[xli] + identifier[xli] ;
identifier[xndt] =( identifier[d2201] * identifier[sin] ( identifier[x2omi] + identifier[xli] - identifier[g22] )+ identifier[d2211] * identifier[sin] ( identifier[xli] - identifier[g22] )+
identifier[d3210] * identifier[sin] ( identifier[xomi] + identifier[xli] - identifier[g32] )+ identifier[d3222] * identifier[sin] (- identifier[xomi] + identifier[xli] - identifier[g32] )+
identifier[d4410] * identifier[sin] ( identifier[x2omi] + identifier[x2li] - identifier[g44] )+ identifier[d4422] * identifier[sin] ( identifier[x2li] - identifier[g44] )+
identifier[d5220] * identifier[sin] ( identifier[xomi] + identifier[xli] - identifier[g52] )+ identifier[d5232] * identifier[sin] (- identifier[xomi] + identifier[xli] - identifier[g52] )+
identifier[d5421] * identifier[sin] ( identifier[xomi] + identifier[x2li] - identifier[g54] )+ identifier[d5433] * identifier[sin] (- identifier[xomi] + identifier[x2li] - identifier[g54] ));
identifier[xldot] = identifier[xni] + identifier[xfact] ;
identifier[xnddt] =( identifier[d2201] * identifier[cos] ( identifier[x2omi] + identifier[xli] - identifier[g22] )+ identifier[d2211] * identifier[cos] ( identifier[xli] - identifier[g22] )+
identifier[d3210] * identifier[cos] ( identifier[xomi] + identifier[xli] - identifier[g32] )+ identifier[d3222] * identifier[cos] (- identifier[xomi] + identifier[xli] - identifier[g32] )+
identifier[d5220] * identifier[cos] ( identifier[xomi] + identifier[xli] - identifier[g52] )+ identifier[d5232] * identifier[cos] (- identifier[xomi] + identifier[xli] - identifier[g52] )+
literal[int] *( identifier[d4410] * identifier[cos] ( identifier[x2omi] + identifier[x2li] - identifier[g44] )+
identifier[d4422] * identifier[cos] ( identifier[x2li] - identifier[g44] )+ identifier[d5421] * identifier[cos] ( identifier[xomi] + identifier[x2li] - identifier[g54] )+
identifier[d5433] * identifier[cos] (- identifier[xomi] + identifier[x2li] - identifier[g54] )));
identifier[xnddt] = identifier[xnddt] * identifier[xldot] ;
keyword[if] identifier[fabs] ( identifier[t] - identifier[atime] )>= identifier[stepp] :
identifier[iret] = literal[int] ;
identifier[iretn] = literal[int] ;
keyword[else] :
identifier[ft] = identifier[t] - identifier[atime] ;
identifier[iretn] = literal[int] ;
keyword[if] identifier[iretn] == literal[int] :
identifier[xli] = identifier[xli] + identifier[xldot] * identifier[delt] + identifier[xndt] * identifier[step2] ;
identifier[xni] = identifier[xni] + identifier[xndt] * identifier[delt] + identifier[xnddt] * identifier[step2] ;
identifier[atime] = identifier[atime] + identifier[delt] ;
identifier[nm] = identifier[xni] + identifier[xndt] * identifier[ft] + identifier[xnddt] * identifier[ft] * identifier[ft] * literal[int] ;
identifier[xl] = identifier[xli] + identifier[xldot] * identifier[ft] + identifier[xndt] * identifier[ft] * identifier[ft] * literal[int] ;
keyword[if] identifier[irez] != literal[int] :
identifier[mm] = identifier[xl] - literal[int] * identifier[nodem] + literal[int] * identifier[theta] ;
identifier[dndt] = identifier[nm] - identifier[no] ;
keyword[else] :
identifier[mm] = identifier[xl] - identifier[nodem] - identifier[argpm] + identifier[theta] ;
identifier[dndt] = identifier[nm] - identifier[no] ;
identifier[nm] = identifier[no] + identifier[dndt] ;
keyword[return] (
identifier[atime] , identifier[em] , identifier[argpm] , identifier[inclm] , identifier[xli] ,
identifier[mm] , identifier[xni] , identifier[nodem] , identifier[dndt] , identifier[nm] ,
) | def _dspace(irez, d2201, d2211, d3210, d3222, d4410, d4422, d5220, d5232, d5421, d5433, dedt, del1, del2, del3, didt, dmdt, dnodt, domdt, argpo, argpdot, t, tc, gsto, xfact, xlamo, no, atime, em, argpm, inclm, xli, mm, xni, nodem, nm):
fasx2 = 0.13130908
fasx4 = 2.8843198
fasx6 = 0.37448087
g22 = 5.7686396
g32 = 0.95240898
g44 = 1.8014998
g52 = 1.050833
g54 = 4.4108898
rptim = 0.0043752690880113 # equates to 7.29211514668855e-5 rad/sec
stepp = 720.0
stepn = -720.0
step2 = 259200.0
# ----------- calculate deep space resonance effects -----------
dndt = 0.0
theta = (gsto + tc * rptim) % twopi
em = em + dedt * t
inclm = inclm + didt * t
argpm = argpm + domdt * t
nodem = nodem + dnodt * t
mm = mm + dmdt * t
'\n // sgp4fix for negative inclinations\n // the following if statement should be commented out\n // if (inclm < 0.0)\n // {\n // inclm = -inclm;\n // argpm = argpm - pi;\n // nodem = nodem + pi;\n // }\n\n /* - update resonances : numerical (euler-maclaurin) integration - */\n /* ------------------------- epoch restart ---------------------- */\n // sgp4fix for propagator problems\n // the following integration works for negative time steps and periods\n // the specific changes are unknown because the original code was so convoluted\n\n // sgp4fix take out atime = 0.0 and fix for faster operation\n '
ft = 0.0
if irez != 0:
# sgp4fix streamline check
if atime == 0.0 or t * atime <= 0.0 or fabs(t) < fabs(atime):
atime = 0.0
xni = no
xli = xlamo # depends on [control=['if'], data=[]]
# sgp4fix move check outside loop
if t > 0.0:
delt = stepp # depends on [control=['if'], data=[]]
else:
delt = stepn
iretn = 381 # added for do loop
iret = 0 # added for loop
while iretn == 381:
# ------------------- dot terms calculated -------------
# ----------- near - synchronous resonance terms -------
if irez != 2:
xndt = del1 * sin(xli - fasx2) + del2 * sin(2.0 * (xli - fasx4)) + del3 * sin(3.0 * (xli - fasx6))
xldot = xni + xfact
xnddt = del1 * cos(xli - fasx2) + 2.0 * del2 * cos(2.0 * (xli - fasx4)) + 3.0 * del3 * cos(3.0 * (xli - fasx6))
xnddt = xnddt * xldot # depends on [control=['if'], data=[]]
else:
# --------- near - half-day resonance terms --------
xomi = argpo + argpdot * atime
x2omi = xomi + xomi
x2li = xli + xli
xndt = d2201 * sin(x2omi + xli - g22) + d2211 * sin(xli - g22) + d3210 * sin(xomi + xli - g32) + d3222 * sin(-xomi + xli - g32) + d4410 * sin(x2omi + x2li - g44) + d4422 * sin(x2li - g44) + d5220 * sin(xomi + xli - g52) + d5232 * sin(-xomi + xli - g52) + d5421 * sin(xomi + x2li - g54) + d5433 * sin(-xomi + x2li - g54)
xldot = xni + xfact
xnddt = d2201 * cos(x2omi + xli - g22) + d2211 * cos(xli - g22) + d3210 * cos(xomi + xli - g32) + d3222 * cos(-xomi + xli - g32) + d5220 * cos(xomi + xli - g52) + d5232 * cos(-xomi + xli - g52) + 2.0 * (d4410 * cos(x2omi + x2li - g44) + d4422 * cos(x2li - g44) + d5421 * cos(xomi + x2li - g54) + d5433 * cos(-xomi + x2li - g54))
xnddt = xnddt * xldot
# ----------------------- integrator -------------------
# sgp4fix move end checks to end of routine
if fabs(t - atime) >= stepp:
iret = 0
iretn = 381 # depends on [control=['if'], data=[]]
else:
ft = t - atime
iretn = 0
if iretn == 381:
xli = xli + xldot * delt + xndt * step2
xni = xni + xndt * delt + xnddt * step2
atime = atime + delt # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['iretn']]
nm = xni + xndt * ft + xnddt * ft * ft * 0.5
xl = xli + xldot * ft + xndt * ft * ft * 0.5
if irez != 1:
mm = xl - 2.0 * nodem + 2.0 * theta
dndt = nm - no # depends on [control=['if'], data=[]]
else:
mm = xl - nodem - argpm + theta
dndt = nm - no
nm = no + dndt # depends on [control=['if'], data=['irez']]
return (atime, em, argpm, inclm, xli, mm, xni, nodem, dndt, nm) |
def set_identifier(self, uid):
"""
Sets unique id for this epub
:Args:
- uid: Value of unique identifier for this book
"""
self.uid = uid
self.set_unique_metadata('DC', 'identifier', self.uid, {'id': self.IDENTIFIER_ID}) | def function[set_identifier, parameter[self, uid]]:
constant[
Sets unique id for this epub
:Args:
- uid: Value of unique identifier for this book
]
name[self].uid assign[=] name[uid]
call[name[self].set_unique_metadata, parameter[constant[DC], constant[identifier], name[self].uid, dictionary[[<ast.Constant object at 0x7da18f58dd50>], [<ast.Attribute object at 0x7da18f58e620>]]]] | keyword[def] identifier[set_identifier] ( identifier[self] , identifier[uid] ):
literal[string]
identifier[self] . identifier[uid] = identifier[uid]
identifier[self] . identifier[set_unique_metadata] ( literal[string] , literal[string] , identifier[self] . identifier[uid] ,{ literal[string] : identifier[self] . identifier[IDENTIFIER_ID] }) | def set_identifier(self, uid):
"""
Sets unique id for this epub
:Args:
- uid: Value of unique identifier for this book
"""
self.uid = uid
self.set_unique_metadata('DC', 'identifier', self.uid, {'id': self.IDENTIFIER_ID}) |
def get_vm_by_property(service_instance, name, datacenter=None, vm_properties=None,
traversal_spec=None, parent_ref=None):
'''
Get virtual machine properties based on the traversal specs and properties list,
returns Virtual Machine object with properties.
service_instance
Service instance object to access vCenter
name
Name of the virtual machine.
datacenter
Datacenter name
vm_properties
List of vm properties.
traversal_spec
Traversal Spec object(s) for searching.
parent_ref
Container Reference object for searching under a given object.
'''
if datacenter and not parent_ref:
parent_ref = salt.utils.vmware.get_datacenter(service_instance, datacenter)
if not vm_properties:
vm_properties = ['name',
'config.hardware.device',
'summary.storage.committed',
'summary.storage.uncommitted',
'summary.storage.unshared',
'layoutEx.file',
'config.guestFullName',
'config.guestId',
'guest.net',
'config.hardware.memoryMB',
'config.hardware.numCPU',
'config.files.vmPathName',
'summary.runtime.powerState',
'guest.toolsStatus']
vm_list = salt.utils.vmware.get_mors_with_properties(service_instance,
vim.VirtualMachine,
vm_properties,
container_ref=parent_ref,
traversal_spec=traversal_spec)
vm_formatted = [vm for vm in vm_list if vm['name'] == name]
if not vm_formatted:
raise salt.exceptions.VMwareObjectRetrievalError('The virtual machine was not found.')
elif len(vm_formatted) > 1:
raise salt.exceptions.VMwareMultipleObjectsError(' '.join([
'Multiple virtual machines were found with the'
'same name, please specify a container.']))
return vm_formatted[0] | def function[get_vm_by_property, parameter[service_instance, name, datacenter, vm_properties, traversal_spec, parent_ref]]:
constant[
Get virtual machine properties based on the traversal specs and properties list,
returns Virtual Machine object with properties.
service_instance
Service instance object to access vCenter
name
Name of the virtual machine.
datacenter
Datacenter name
vm_properties
List of vm properties.
traversal_spec
Traversal Spec object(s) for searching.
parent_ref
Container Reference object for searching under a given object.
]
if <ast.BoolOp object at 0x7da1b1ca7790> begin[:]
variable[parent_ref] assign[=] call[name[salt].utils.vmware.get_datacenter, parameter[name[service_instance], name[datacenter]]]
if <ast.UnaryOp object at 0x7da1b1ca58a0> begin[:]
variable[vm_properties] assign[=] list[[<ast.Constant object at 0x7da1b1ca7850>, <ast.Constant object at 0x7da1b1ca4970>, <ast.Constant object at 0x7da1b1ca49a0>, <ast.Constant object at 0x7da1b1ca5480>, <ast.Constant object at 0x7da1b1ca6560>, <ast.Constant object at 0x7da1b1ca4130>, <ast.Constant object at 0x7da1b1ca4640>, <ast.Constant object at 0x7da1b1ca45b0>, <ast.Constant object at 0x7da1b1ca4670>, <ast.Constant object at 0x7da1b1ca40a0>, <ast.Constant object at 0x7da1b1ca4700>, <ast.Constant object at 0x7da1b1ca4160>, <ast.Constant object at 0x7da1b1ca47c0>, <ast.Constant object at 0x7da1b1ca5090>]]
variable[vm_list] assign[=] call[name[salt].utils.vmware.get_mors_with_properties, parameter[name[service_instance], name[vim].VirtualMachine, name[vm_properties]]]
variable[vm_formatted] assign[=] <ast.ListComp object at 0x7da1b1ca7070>
if <ast.UnaryOp object at 0x7da1b1ca5540> begin[:]
<ast.Raise object at 0x7da1b1ca4eb0>
return[call[name[vm_formatted]][constant[0]]] | keyword[def] identifier[get_vm_by_property] ( identifier[service_instance] , identifier[name] , identifier[datacenter] = keyword[None] , identifier[vm_properties] = keyword[None] ,
identifier[traversal_spec] = keyword[None] , identifier[parent_ref] = keyword[None] ):
literal[string]
keyword[if] identifier[datacenter] keyword[and] keyword[not] identifier[parent_ref] :
identifier[parent_ref] = identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_datacenter] ( identifier[service_instance] , identifier[datacenter] )
keyword[if] keyword[not] identifier[vm_properties] :
identifier[vm_properties] =[ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ]
identifier[vm_list] = identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_mors_with_properties] ( identifier[service_instance] ,
identifier[vim] . identifier[VirtualMachine] ,
identifier[vm_properties] ,
identifier[container_ref] = identifier[parent_ref] ,
identifier[traversal_spec] = identifier[traversal_spec] )
identifier[vm_formatted] =[ identifier[vm] keyword[for] identifier[vm] keyword[in] identifier[vm_list] keyword[if] identifier[vm] [ literal[string] ]== identifier[name] ]
keyword[if] keyword[not] identifier[vm_formatted] :
keyword[raise] identifier[salt] . identifier[exceptions] . identifier[VMwareObjectRetrievalError] ( literal[string] )
keyword[elif] identifier[len] ( identifier[vm_formatted] )> literal[int] :
keyword[raise] identifier[salt] . identifier[exceptions] . identifier[VMwareMultipleObjectsError] ( literal[string] . identifier[join] ([
literal[string]
literal[string] ]))
keyword[return] identifier[vm_formatted] [ literal[int] ] | def get_vm_by_property(service_instance, name, datacenter=None, vm_properties=None, traversal_spec=None, parent_ref=None):
"""
Get virtual machine properties based on the traversal specs and properties list,
returns Virtual Machine object with properties.
service_instance
Service instance object to access vCenter
name
Name of the virtual machine.
datacenter
Datacenter name
vm_properties
List of vm properties.
traversal_spec
Traversal Spec object(s) for searching.
parent_ref
Container Reference object for searching under a given object.
"""
if datacenter and (not parent_ref):
parent_ref = salt.utils.vmware.get_datacenter(service_instance, datacenter) # depends on [control=['if'], data=[]]
if not vm_properties:
vm_properties = ['name', 'config.hardware.device', 'summary.storage.committed', 'summary.storage.uncommitted', 'summary.storage.unshared', 'layoutEx.file', 'config.guestFullName', 'config.guestId', 'guest.net', 'config.hardware.memoryMB', 'config.hardware.numCPU', 'config.files.vmPathName', 'summary.runtime.powerState', 'guest.toolsStatus'] # depends on [control=['if'], data=[]]
vm_list = salt.utils.vmware.get_mors_with_properties(service_instance, vim.VirtualMachine, vm_properties, container_ref=parent_ref, traversal_spec=traversal_spec)
vm_formatted = [vm for vm in vm_list if vm['name'] == name]
if not vm_formatted:
raise salt.exceptions.VMwareObjectRetrievalError('The virtual machine was not found.') # depends on [control=['if'], data=[]]
elif len(vm_formatted) > 1:
raise salt.exceptions.VMwareMultipleObjectsError(' '.join(['Multiple virtual machines were found with thesame name, please specify a container.'])) # depends on [control=['if'], data=[]]
return vm_formatted[0] |
def get_subject(self, lang=None):
""" Get the subject of the object
:param lang: Lang to retrieve
:return: Subject string representation
:rtype: Literal
"""
return self.metadata.get_single(key=DC.subject, lang=lang) | def function[get_subject, parameter[self, lang]]:
constant[ Get the subject of the object
:param lang: Lang to retrieve
:return: Subject string representation
:rtype: Literal
]
return[call[name[self].metadata.get_single, parameter[]]] | keyword[def] identifier[get_subject] ( identifier[self] , identifier[lang] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[metadata] . identifier[get_single] ( identifier[key] = identifier[DC] . identifier[subject] , identifier[lang] = identifier[lang] ) | def get_subject(self, lang=None):
""" Get the subject of the object
:param lang: Lang to retrieve
:return: Subject string representation
:rtype: Literal
"""
return self.metadata.get_single(key=DC.subject, lang=lang) |
def from_dict(d):
"""
Recreate a KrausModel from the dictionary representation.
:param dict d: The dictionary representing the KrausModel. See `to_dict` for an
example.
:return: The deserialized KrausModel.
:rtype: KrausModel
"""
kraus_ops = [KrausModel.unpack_kraus_matrix(k) for k in d['kraus_ops']]
return KrausModel(d['gate'], d['params'], d['targets'], kraus_ops, d['fidelity']) | def function[from_dict, parameter[d]]:
constant[
Recreate a KrausModel from the dictionary representation.
:param dict d: The dictionary representing the KrausModel. See `to_dict` for an
example.
:return: The deserialized KrausModel.
:rtype: KrausModel
]
variable[kraus_ops] assign[=] <ast.ListComp object at 0x7da1b1bbb0a0>
return[call[name[KrausModel], parameter[call[name[d]][constant[gate]], call[name[d]][constant[params]], call[name[d]][constant[targets]], name[kraus_ops], call[name[d]][constant[fidelity]]]]] | keyword[def] identifier[from_dict] ( identifier[d] ):
literal[string]
identifier[kraus_ops] =[ identifier[KrausModel] . identifier[unpack_kraus_matrix] ( identifier[k] ) keyword[for] identifier[k] keyword[in] identifier[d] [ literal[string] ]]
keyword[return] identifier[KrausModel] ( identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[d] [ literal[string] ], identifier[kraus_ops] , identifier[d] [ literal[string] ]) | def from_dict(d):
"""
Recreate a KrausModel from the dictionary representation.
:param dict d: The dictionary representing the KrausModel. See `to_dict` for an
example.
:return: The deserialized KrausModel.
:rtype: KrausModel
"""
kraus_ops = [KrausModel.unpack_kraus_matrix(k) for k in d['kraus_ops']]
return KrausModel(d['gate'], d['params'], d['targets'], kraus_ops, d['fidelity']) |
def _permute_one_sample_iscs(iscs, group_parameters, i, pairwise=False,
                             summary_statistic='median', group_matrix=None,
                             exact_permutations=None, prng=None):
    """Apply one-sample (sign-flip) permutation to ISC data.

    Input ISCs should be n_subjects (leave-one-out approach) or
    n_pairs (pairwise approach) by n_voxels or n_ROIs array. This
    function is only intended to be used internally by the
    permutation_isc function in this module.

    Parameters
    ----------
    iscs : ndarray or list
        ISC values
    group_parameters : dict
        Dictionary of group parameters
    i : int
        Permutation iteration
    pairwise : bool, default: False
        Indicator of pairwise or leave-one-out, should match ISCs variable
    summary_statistic : str, default: 'median'
        Summary statistic, either 'median' (default) or 'mean'
    group_matrix : ndarray, default: None
        Accepted for API symmetry with related helpers; not used here
    exact_permutations : list
        List of permutations
    prng : None or np.random.RandomState, default: None
        Initial random seed

    Returns
    -------
    isc_sample : ndarray
        Array of permuted ISC values
    """
    # Use the i-th exact permutation when supplied; otherwise draw one
    # random sign flip per subject.
    if exact_permutations:
        flips = np.array(exact_permutations[i])
    else:
        flips = prng.choice([-1, 1],
                            size=group_parameters['n_subjects'],
                            replace=True)

    # Pairwise ISCs are stored in condensed (vector) form: apply flips to
    # the square matrix along both rows and columns, then re-condense.
    if pairwise:
        flipped_matrix = (group_parameters['group_matrix']
                          * flips * flips[:, np.newaxis])
        flips = squareform(flipped_matrix, checks=False)

    # Broadcast the flips across voxels/ROIs (same flip per row).
    flipped_iscs = iscs * flips[:, np.newaxis]

    # Summarize the sign-flipped ISCs over the subject/pair axis.
    return compute_summary_statistic(flipped_iscs,
                                     summary_statistic=summary_statistic,
                                     axis=0)
constant[Applies one-sample permutations to ISC data
Input ISCs should be n_subjects (leave-one-out approach) or
n_pairs (pairwise approach) by n_voxels or n_ROIs array.
This function is only intended to be used internally by the
permutation_isc function in this module.
Parameters
----------
iscs : ndarray or list
ISC values
group_parameters : dict
Dictionary of group parameters
i : int
Permutation iteration
pairwise : bool, default: False
Indicator of pairwise or leave-one-out, should match ISCs variable
summary_statistic : str, default: 'median'
Summary statistic, either 'median' (default) or 'mean'
exact_permutations : list
List of permutations
prng = None or np.random.RandomState, default: None
Initial random seed
Returns
-------
isc_sample : ndarray
Array of permuted ISC values
]
if name[exact_permutations] begin[:]
variable[sign_flipper] assign[=] call[name[np].array, parameter[call[name[exact_permutations]][name[i]]]]
if name[pairwise] begin[:]
variable[matrix_flipped] assign[=] binary_operation[binary_operation[call[name[group_parameters]][constant[group_matrix]] * name[sign_flipper]] * call[name[sign_flipper]][tuple[[<ast.Slice object at 0x7da1b0779120>, <ast.Attribute object at 0x7da1b07792a0>]]]]
variable[sign_flipper] assign[=] call[name[squareform], parameter[name[matrix_flipped]]]
variable[isc_flipped] assign[=] binary_operation[name[iscs] * call[name[sign_flipper]][tuple[[<ast.Slice object at 0x7da1b074ebc0>, <ast.Attribute object at 0x7da1b074eb30>]]]]
variable[isc_sample] assign[=] call[name[compute_summary_statistic], parameter[name[isc_flipped]]]
return[name[isc_sample]] | keyword[def] identifier[_permute_one_sample_iscs] ( identifier[iscs] , identifier[group_parameters] , identifier[i] , identifier[pairwise] = keyword[False] ,
identifier[summary_statistic] = literal[string] , identifier[group_matrix] = keyword[None] ,
identifier[exact_permutations] = keyword[None] , identifier[prng] = keyword[None] ):
literal[string]
keyword[if] identifier[exact_permutations] :
identifier[sign_flipper] = identifier[np] . identifier[array] ( identifier[exact_permutations] [ identifier[i] ])
keyword[else] :
identifier[sign_flipper] = identifier[prng] . identifier[choice] ([- literal[int] , literal[int] ],
identifier[size] = identifier[group_parameters] [ literal[string] ],
identifier[replace] = keyword[True] )
keyword[if] identifier[pairwise] :
identifier[matrix_flipped] =( identifier[group_parameters] [ literal[string] ]* identifier[sign_flipper]
* identifier[sign_flipper] [
:, identifier[np] . identifier[newaxis] ])
identifier[sign_flipper] = identifier[squareform] ( identifier[matrix_flipped] , identifier[checks] = keyword[False] )
identifier[isc_flipped] = identifier[iscs] * identifier[sign_flipper] [:, identifier[np] . identifier[newaxis] ]
identifier[isc_sample] = identifier[compute_summary_statistic] (
identifier[isc_flipped] ,
identifier[summary_statistic] = identifier[summary_statistic] ,
identifier[axis] = literal[int] )
keyword[return] identifier[isc_sample] | def _permute_one_sample_iscs(iscs, group_parameters, i, pairwise=False, summary_statistic='median', group_matrix=None, exact_permutations=None, prng=None):
"""Applies one-sample permutations to ISC data
Input ISCs should be n_subjects (leave-one-out approach) or
n_pairs (pairwise approach) by n_voxels or n_ROIs array.
This function is only intended to be used internally by the
permutation_isc function in this module.
Parameters
----------
iscs : ndarray or list
ISC values
group_parameters : dict
Dictionary of group parameters
i : int
Permutation iteration
pairwise : bool, default: False
Indicator of pairwise or leave-one-out, should match ISCs variable
summary_statistic : str, default: 'median'
Summary statistic, either 'median' (default) or 'mean'
exact_permutations : list
List of permutations
prng = None or np.random.RandomState, default: None
Initial random seed
Returns
-------
isc_sample : ndarray
Array of permuted ISC values
"""
# Randomized sign-flips
if exact_permutations:
sign_flipper = np.array(exact_permutations[i]) # depends on [control=['if'], data=[]]
else:
sign_flipper = prng.choice([-1, 1], size=group_parameters['n_subjects'], replace=True)
# If pairwise, apply sign-flips by rows and columns
if pairwise:
matrix_flipped = group_parameters['group_matrix'] * sign_flipper * sign_flipper[:, np.newaxis]
sign_flipper = squareform(matrix_flipped, checks=False) # depends on [control=['if'], data=[]]
# Apply flips along ISC axis (same across voxels)
isc_flipped = iscs * sign_flipper[:, np.newaxis]
# Get summary statistics on sign-flipped ISCs
isc_sample = compute_summary_statistic(isc_flipped, summary_statistic=summary_statistic, axis=0)
return isc_sample |
def annotateText(self, text, layer, np_labels = None):
    ''' Chunks the given Text and attaches the results as a new layer.

        If *np_labels* is provided, those B-I-O annotations are used
        directly; otherwise new NP labels are produced with
        self.analyze_text();

        Parameters
        ----------
        text: estnltk.text.Text
            The input text that receives the new layer of NP chunking
            annotations;
        layer: str
            Name of the layer to create;
        np_labels : list of str
            Optional: a B-I-O label for each word in *text*; if given,
            these are used instead of running the chunker;

        Returns
        -------
        text
            The same Text object, with the new NP annotation layer
            attached;
    '''
    # Only Text objects can be annotated
    if not isinstance(text, Text):
        raise Exception(' Input text should be of type Text, but it is ', text)
    input_words = text.words
    # Produce B-I-O labels when the caller did not supply any
    if not np_labels:
        np_labels = self.analyze_text( text, return_type="labels" )
    if len(input_words) != len(np_labels):
        raise Exception(' (!) Number of words ('+str(len(input_words))+\
                        ') does not match number of labels '+str(len(np_labels)))
    # Fetch NP chunks and convert each into an annotation record
    phrases = self.get_phrases( text, np_labels )
    annotations = []
    if phrases:
        for phrase in phrases:
            annotations.append({
                START: phrase[0][START],
                END:   phrase[-1][END],
                TEXT:  ' '.join( word[TEXT] for word in phrase ),
            })
    text[layer] = annotations
    return text
return text | def function[annotateText, parameter[self, text, layer, np_labels]]:
constant[ Applies this chunker on given Text, and adds results of
the chunking as a new annotation layer to the text.
If the NP annotations are provided (via the input list
*np_labels*), uses the given NP annotations, otherwise
produces new NP_LABEL annotations via the method
self.analyze_text();
Parameters
----------
text: estnltk.text.Text
The input text where the new layer of NP chunking
annotations is to be added;
layer: str
Name of the new layer;
np_labels : list of str
Optional: A list of strings, containing a B-I-O label
for each word in *text*; If provided, uses annotations
from *np_labels*, otherwise creates new annotations
with this chunker;
Returns
-------
text
The input text where a new layer (containing NP
annotations) has been added;
]
variable[input_words] assign[=] constant[None]
if call[name[isinstance], parameter[name[text], name[Text]]] begin[:]
variable[input_words] assign[=] name[text].words
variable[phrases] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da18f09ea70> begin[:]
variable[np_labels] assign[=] call[name[self].analyze_text, parameter[name[text]]]
if compare[call[name[len], parameter[name[input_words]]] not_equal[!=] call[name[len], parameter[name[np_labels]]]] begin[:]
<ast.Raise object at 0x7da18f09d900>
variable[phrases] assign[=] call[name[self].get_phrases, parameter[name[text], name[np_labels]]]
variable[annotations] assign[=] list[[]]
if name[phrases] begin[:]
for taget[name[phrase]] in starred[name[phrases]] begin[:]
variable[phrase_annotation] assign[=] dictionary[[], []]
call[name[phrase_annotation]][name[START]] assign[=] call[call[name[phrase]][constant[0]]][name[START]]
call[name[phrase_annotation]][name[END]] assign[=] call[call[name[phrase]][<ast.UnaryOp object at 0x7da18f09d000>]][name[END]]
call[name[phrase_annotation]][name[TEXT]] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18f09d960>]]
call[name[annotations].append, parameter[name[phrase_annotation]]]
call[name[text]][name[layer]] assign[=] name[annotations]
return[name[text]] | keyword[def] identifier[annotateText] ( identifier[self] , identifier[text] , identifier[layer] , identifier[np_labels] = keyword[None] ):
literal[string]
identifier[input_words] = keyword[None]
keyword[if] identifier[isinstance] ( identifier[text] , identifier[Text] ):
identifier[input_words] = identifier[text] . identifier[words]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] , identifier[text] )
identifier[phrases] =[]
keyword[if] keyword[not] identifier[np_labels] :
identifier[np_labels] = identifier[self] . identifier[analyze_text] ( identifier[text] , identifier[return_type] = literal[string] )
keyword[if] identifier[len] ( identifier[input_words] )!= identifier[len] ( identifier[np_labels] ):
keyword[raise] identifier[Exception] ( literal[string] + identifier[str] ( identifier[len] ( identifier[input_words] ))+ literal[string] + identifier[str] ( identifier[len] ( identifier[np_labels] )))
identifier[phrases] = identifier[self] . identifier[get_phrases] ( identifier[text] , identifier[np_labels] )
identifier[annotations] =[]
keyword[if] identifier[phrases] :
keyword[for] identifier[phrase] keyword[in] identifier[phrases] :
identifier[phrase_annotation] ={}
identifier[phrase_annotation] [ identifier[START] ]= identifier[phrase] [ literal[int] ][ identifier[START] ]
identifier[phrase_annotation] [ identifier[END] ]= identifier[phrase] [- literal[int] ][ identifier[END] ]
identifier[phrase_annotation] [ identifier[TEXT] ]= literal[string] . identifier[join] ([ identifier[word] [ identifier[TEXT] ] keyword[for] identifier[word] keyword[in] identifier[phrase] ])
identifier[annotations] . identifier[append] ( identifier[phrase_annotation] )
identifier[text] [ identifier[layer] ]= identifier[annotations]
keyword[return] identifier[text] | def annotateText(self, text, layer, np_labels=None):
""" Applies this chunker on given Text, and adds results of
the chunking as a new annotation layer to the text.
If the NP annotations are provided (via the input list
*np_labels*), uses the given NP annotations, otherwise
produces new NP_LABEL annotations via the method
self.analyze_text();
Parameters
----------
text: estnltk.text.Text
The input text where the new layer of NP chunking
annotations is to be added;
layer: str
Name of the new layer;
np_labels : list of str
Optional: A list of strings, containing a B-I-O label
for each word in *text*; If provided, uses annotations
from *np_labels*, otherwise creates new annotations
with this chunker;
Returns
-------
text
The input text where a new layer (containing NP
annotations) has been added;
"""
input_words = None
if isinstance(text, Text): # input is Text
input_words = text.words # depends on [control=['if'], data=[]]
else:
raise Exception(' Input text should be of type Text, but it is ', text)
phrases = [] # If NP_LABEL-s are not provided, text needs to be analyzed first:
if not np_labels:
np_labels = self.analyze_text(text, return_type='labels') # depends on [control=['if'], data=[]]
if len(input_words) != len(np_labels):
raise Exception(' (!) Number of words (' + str(len(input_words)) + ') does not match number of labels ' + str(len(np_labels))) # depends on [control=['if'], data=[]] # Fetch NP chunks
phrases = self.get_phrases(text, np_labels) # Create and attach annotations to the Text object
annotations = []
if phrases:
for phrase in phrases:
phrase_annotation = {}
phrase_annotation[START] = phrase[0][START]
phrase_annotation[END] = phrase[-1][END]
phrase_annotation[TEXT] = ' '.join([word[TEXT] for word in phrase])
annotations.append(phrase_annotation) # depends on [control=['for'], data=['phrase']] # depends on [control=['if'], data=[]]
text[layer] = annotations
return text |
def retrieve_document(file_path, directory='sec_filings'):
    '''
    Retrieve a filing from the SEC FTP server and store it on disk.

    This function takes a file path beginning with edgar and stores the form
    in a directory. The default directory is sec_filings but can be changed
    through a keyword argument.

    :param file_path: remote path on the SEC FTP server (e.g. 'edgar/...').
    :param directory: local directory to store the filing in; created if it
        does not already exist.
    '''
    ftp = FTP('ftp.sec.gov', timeout=None)
    try:
        ftp.login()
        # Flatten the remote path into a single local file name.
        name = file_path.replace('/', '_')
        if not os.path.exists(directory):
            os.makedirs(directory)
        # Download into a temporary file first, then decode and persist.
        with tempfile.TemporaryFile() as temp:
            ftp.retrbinary('RETR %s' % file_path, temp.write)
            temp.seek(0)
            with open('{}/{}'.format(directory, name), 'w+') as f:
                f.write(temp.read().decode("utf-8"))
    finally:
        # Always release the FTP connection, even if the download fails
        # (the original closed it only on the success path).
        ftp.close()
constant[
This function takes a file path beginning with edgar and stores the form in a directory.
The default directory is sec_filings but can be changed through a keyword argument.
]
variable[ftp] assign[=] call[name[FTP], parameter[constant[ftp.sec.gov]]]
call[name[ftp].login, parameter[]]
variable[name] assign[=] call[name[file_path].replace, parameter[constant[/], constant[_]]]
if <ast.UnaryOp object at 0x7da2054a71c0> begin[:]
call[name[os].makedirs, parameter[name[directory]]]
with call[name[tempfile].TemporaryFile, parameter[]] begin[:]
call[name[ftp].retrbinary, parameter[binary_operation[constant[RETR %s] <ast.Mod object at 0x7da2590d6920> name[file_path]], name[temp].write]]
call[name[temp].seek, parameter[constant[0]]]
with call[name[open], parameter[call[constant[{}/{}].format, parameter[name[directory], name[name]]], constant[w+]]] begin[:]
call[name[f].write, parameter[call[call[name[temp].read, parameter[]].decode, parameter[constant[utf-8]]]]]
name[f].closed
variable[records] assign[=] name[temp]
variable[retry] assign[=] constant[False]
call[name[ftp].close, parameter[]] | keyword[def] identifier[retrieve_document] ( identifier[file_path] , identifier[directory] = literal[string] ):
literal[string]
identifier[ftp] = identifier[FTP] ( literal[string] , identifier[timeout] = keyword[None] )
identifier[ftp] . identifier[login] ()
identifier[name] = identifier[file_path] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[directory] ):
identifier[os] . identifier[makedirs] ( identifier[directory] )
keyword[with] identifier[tempfile] . identifier[TemporaryFile] () keyword[as] identifier[temp] :
identifier[ftp] . identifier[retrbinary] ( literal[string] % identifier[file_path] , identifier[temp] . identifier[write] )
identifier[temp] . identifier[seek] ( literal[int] )
keyword[with] identifier[open] ( literal[string] . identifier[format] ( identifier[directory] , identifier[name] ), literal[string] ) keyword[as] identifier[f] :
identifier[f] . identifier[write] ( identifier[temp] . identifier[read] (). identifier[decode] ( literal[string] ))
identifier[f] . identifier[closed]
identifier[records] = identifier[temp]
identifier[retry] = keyword[False]
identifier[ftp] . identifier[close] () | def retrieve_document(file_path, directory='sec_filings'):
"""
This function takes a file path beginning with edgar and stores the form in a directory.
The default directory is sec_filings but can be changed through a keyword argument.
"""
ftp = FTP('ftp.sec.gov', timeout=None)
ftp.login()
name = file_path.replace('/', '_')
if not os.path.exists(directory):
os.makedirs(directory) # depends on [control=['if'], data=[]]
with tempfile.TemporaryFile() as temp:
ftp.retrbinary('RETR %s' % file_path, temp.write)
temp.seek(0)
with open('{}/{}'.format(directory, name), 'w+') as f:
f.write(temp.read().decode('utf-8')) # depends on [control=['with'], data=['f']]
f.closed
records = temp
retry = False # depends on [control=['with'], data=['temp']]
ftp.close() |
def template(self, template_filename, **kwargs):
    """
    Render a template and send it as the HTML response.

    :params template_filename: Template name
    :params kwargs: Template parameters (gns3_version and gns3_host are
        always injected)
    """
    # Inject the values every template expects before rendering.
    kwargs["gns3_version"] = __version__
    kwargs["gns3_host"] = self._request.host
    page = renderer.get_template(template_filename)
    self.html(page.render(**kwargs))
self.html(template.render(**kwargs)) | def function[template, parameter[self, template_filename]]:
constant[
Render a template
:params template: Template name
:params kwargs: Template parameters
]
variable[template] assign[=] call[name[renderer].get_template, parameter[name[template_filename]]]
call[name[kwargs]][constant[gns3_version]] assign[=] name[__version__]
call[name[kwargs]][constant[gns3_host]] assign[=] name[self]._request.host
call[name[self].html, parameter[call[name[template].render, parameter[]]]] | keyword[def] identifier[template] ( identifier[self] , identifier[template_filename] ,** identifier[kwargs] ):
literal[string]
identifier[template] = identifier[renderer] . identifier[get_template] ( identifier[template_filename] )
identifier[kwargs] [ literal[string] ]= identifier[__version__]
identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[_request] . identifier[host]
identifier[self] . identifier[html] ( identifier[template] . identifier[render] (** identifier[kwargs] )) | def template(self, template_filename, **kwargs):
"""
Render a template
:params template: Template name
:params kwargs: Template parameters
"""
template = renderer.get_template(template_filename)
kwargs['gns3_version'] = __version__
kwargs['gns3_host'] = self._request.host
self.html(template.render(**kwargs)) |
def type(self):
    """Enum type used for field."""
    # Resolve the type name lazily on first access, then cache it.
    if self.__type is None:
        resolved = find_definition(
            self.__type_name, self.message_definition())
        # Must be a proper subclass of Enum (not Enum itself).
        is_enum_subclass = (resolved is not Enum
                            and isinstance(resolved, type)
                            and issubclass(resolved, Enum))
        if not is_enum_subclass:
            raise FieldDefinitionError(
                'Invalid enum type: %s' % resolved)
        self.__type = resolved
    return self.__type
return self.__type | def function[type, parameter[self]]:
constant[Enum type used for field.]
if compare[name[self].__type is constant[None]] begin[:]
variable[found_type] assign[=] call[name[find_definition], parameter[name[self].__type_name, call[name[self].message_definition, parameter[]]]]
if <ast.UnaryOp object at 0x7da1b080a020> begin[:]
<ast.Raise object at 0x7da1b0809f90>
name[self].__type assign[=] name[found_type]
return[name[self].__type] | keyword[def] identifier[type] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__type] keyword[is] keyword[None] :
identifier[found_type] = identifier[find_definition] (
identifier[self] . identifier[__type_name] , identifier[self] . identifier[message_definition] ())
keyword[if] keyword[not] ( identifier[found_type] keyword[is] keyword[not] identifier[Enum] keyword[and]
identifier[isinstance] ( identifier[found_type] , identifier[type] ) keyword[and]
identifier[issubclass] ( identifier[found_type] , identifier[Enum] )):
keyword[raise] identifier[FieldDefinitionError] (
literal[string] % identifier[found_type] )
identifier[self] . identifier[__type] = identifier[found_type]
keyword[return] identifier[self] . identifier[__type] | def type(self):
"""Enum type used for field."""
if self.__type is None:
found_type = find_definition(self.__type_name, self.message_definition())
if not (found_type is not Enum and isinstance(found_type, type) and issubclass(found_type, Enum)):
raise FieldDefinitionError('Invalid enum type: %s' % found_type) # depends on [control=['if'], data=[]]
self.__type = found_type # depends on [control=['if'], data=[]]
return self.__type |
def find_task_descriptor(self, task_id):
    """Returns the task_descriptor corresponding to task_id."""
    # The list index is not guaranteed to be task_id - 1 when --tasks is
    # used with a min/max range, so scan for a matching 'task-id' instead.
    matches = (td for td in self.task_descriptors
               if td.task_metadata.get('task-id') == task_id)
    return next(matches, None)
constant[Returns the task_descriptor corresponding to task_id.]
for taget[name[task_descriptor]] in starred[name[self].task_descriptors] begin[:]
if compare[call[name[task_descriptor].task_metadata.get, parameter[constant[task-id]]] equal[==] name[task_id]] begin[:]
return[name[task_descriptor]]
return[constant[None]] | keyword[def] identifier[find_task_descriptor] ( identifier[self] , identifier[task_id] ):
literal[string]
keyword[for] identifier[task_descriptor] keyword[in] identifier[self] . identifier[task_descriptors] :
keyword[if] identifier[task_descriptor] . identifier[task_metadata] . identifier[get] ( literal[string] )== identifier[task_id] :
keyword[return] identifier[task_descriptor]
keyword[return] keyword[None] | def find_task_descriptor(self, task_id):
"""Returns the task_descriptor corresponding to task_id."""
# It is not guaranteed that the index will be task_id - 1 when --tasks is
# used with a min/max range.
for task_descriptor in self.task_descriptors:
if task_descriptor.task_metadata.get('task-id') == task_id:
return task_descriptor # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['task_descriptor']]
return None |
def sync_params(self):
    """ Ensure that shared parameters are the same value everywhere """
    def _align(components, name):
        # If any adjacent pair of values disagrees, the set is not
        # uniform; propagate the first component's value to every sharer.
        values = [comp.get_values(name) for comp in components]
        mismatched = any(values[j] != values[j + 1]
                         for j in range(len(values) - 1))
        if mismatched:
            for comp in components:
                comp.set_values(name, values[0])
    for name, components in iteritems(self.lmap):
        # Only parameters shared by more than one component need syncing.
        if isinstance(components, list) and len(components) > 1:
            _align(components, name)
constant[ Ensure that shared parameters are the same value everywhere ]
def function[_normalize, parameter[comps, param]]:
variable[vals] assign[=] <ast.ListComp object at 0x7da204564490>
variable[diff] assign[=] call[name[any], parameter[<ast.ListComp object at 0x7da204564a90>]]
if name[diff] begin[:]
for taget[name[c]] in starred[name[comps]] begin[:]
call[name[c].set_values, parameter[name[param], call[name[vals]][constant[0]]]]
for taget[tuple[[<ast.Name object at 0x7da2045645e0>, <ast.Name object at 0x7da204566c80>]]] in starred[call[name[iteritems], parameter[name[self].lmap]]] begin[:]
if <ast.BoolOp object at 0x7da204565e70> begin[:]
call[name[_normalize], parameter[name[comps], name[param]]] | keyword[def] identifier[sync_params] ( identifier[self] ):
literal[string]
keyword[def] identifier[_normalize] ( identifier[comps] , identifier[param] ):
identifier[vals] =[ identifier[c] . identifier[get_values] ( identifier[param] ) keyword[for] identifier[c] keyword[in] identifier[comps] ]
identifier[diff] = identifier[any] ([ identifier[vals] [ identifier[i] ]!= identifier[vals] [ identifier[i] + literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[vals] )- literal[int] )])
keyword[if] identifier[diff] :
keyword[for] identifier[c] keyword[in] identifier[comps] :
identifier[c] . identifier[set_values] ( identifier[param] , identifier[vals] [ literal[int] ])
keyword[for] identifier[param] , identifier[comps] keyword[in] identifier[iteritems] ( identifier[self] . identifier[lmap] ):
keyword[if] identifier[isinstance] ( identifier[comps] , identifier[list] ) keyword[and] identifier[len] ( identifier[comps] )> literal[int] :
identifier[_normalize] ( identifier[comps] , identifier[param] ) | def sync_params(self):
""" Ensure that shared parameters are the same value everywhere """
def _normalize(comps, param):
vals = [c.get_values(param) for c in comps]
diff = any([vals[i] != vals[i + 1] for i in range(len(vals) - 1)])
if diff:
for c in comps:
c.set_values(param, vals[0]) # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=[]]
for (param, comps) in iteritems(self.lmap):
if isinstance(comps, list) and len(comps) > 1:
_normalize(comps, param) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def Delete(self, n = 1, dl = 0):
    """Press the Delete key *n* times.

    Waits via self.Delay(dl) first, then sends *n* taps of the delete
    key through the underlying keyboard controller.

    :param n: number of times to tap the delete key (default 1).
    :param dl: delay argument forwarded to self.Delay before typing
        (default 0, i.e. no delay).
    """
    self.Delay(dl)
    self.keyboard.tap_key(self.keyboard.delete_key, n)
constant[删除键n次
]
call[name[self].Delay, parameter[name[dl]]]
call[name[self].keyboard.tap_key, parameter[name[self].keyboard.delete_key, name[n]]] | keyword[def] identifier[Delete] ( identifier[self] , identifier[n] = literal[int] , identifier[dl] = literal[int] ):
literal[string]
identifier[self] . identifier[Delay] ( identifier[dl] )
identifier[self] . identifier[keyboard] . identifier[tap_key] ( identifier[self] . identifier[keyboard] . identifier[delete_key] , identifier[n] ) | def Delete(self, n=1, dl=0):
"""删除键n次
"""
self.Delay(dl)
self.keyboard.tap_key(self.keyboard.delete_key, n) |
def invoke(*args, **kwargs):
    """Invokes a command callback in exactly the way it expects. There
    are two ways to invoke this method:
    1. the first argument can be a callback and all other arguments and
    keyword arguments are forwarded directly to the function.
    2. the first argument is a click command object. In that case all
    arguments are forwarded as well but proper click parameters
    (options and click arguments) must be keyword arguments and Click
    will fill in defaults.
    Note that before Click 3.2 keyword arguments were not properly filled
    in against the intention of this code and no context was created. For
    more information about this change and why it was done in a bugfix
    release see :ref:`upgrade-to-3.2`.
    """
    # The first positional argument is the Context (self); the second is
    # either a plain callable or a Command whose callback will be invoked.
    self, callback = args[:2]
    ctx = self
    # It's also possible to invoke another command which might or
    # might not have a callback. In that case we also fill
    # in defaults and make a new context for this command.
    if isinstance(callback, Command):
        other_cmd = callback
        callback = other_cmd.callback
        ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
        if callback is None:
            raise TypeError('The given command does not have a '
                            'callback that can be invoked.')
        # Fill in defaults for every declared parameter the caller did
        # not pass explicitly (only parameters exposed to the callback).
        for param in other_cmd.params:
            if param.name not in kwargs and param.expose_value:
                kwargs[param.name] = param.get_default(ctx)
    # Remaining positional arguments are forwarded verbatim.
    args = args[2:]
    # Run the callback with the (possibly new) context pushed as the
    # current one; usage errors raised inside are attributed to self.
    with augment_usage_errors(self):
        with ctx:
            return callback(*args, **kwargs)
constant[Invokes a command callback in exactly the way it expects. There
are two ways to invoke this method:
1. the first argument can be a callback and all other arguments and
keyword arguments are forwarded directly to the function.
2. the first argument is a click command object. In that case all
arguments are forwarded as well but proper click parameters
(options and click arguments) must be keyword arguments and Click
will fill in defaults.
Note that before Click 3.2 keyword arguments were not properly filled
in against the intention of this code and no context was created. For
more information about this change and why it was done in a bugfix
release see :ref:`upgrade-to-3.2`.
]
<ast.Tuple object at 0x7da2041dad40> assign[=] call[name[args]][<ast.Slice object at 0x7da2041d8e50>]
variable[ctx] assign[=] name[self]
if call[name[isinstance], parameter[name[callback], name[Command]]] begin[:]
variable[other_cmd] assign[=] name[callback]
variable[callback] assign[=] name[other_cmd].callback
variable[ctx] assign[=] call[name[Context], parameter[name[other_cmd]]]
if compare[name[callback] is constant[None]] begin[:]
<ast.Raise object at 0x7da2041d8cd0>
for taget[name[param]] in starred[name[other_cmd].params] begin[:]
if <ast.BoolOp object at 0x7da2041da950> begin[:]
call[name[kwargs]][name[param].name] assign[=] call[name[param].get_default, parameter[name[ctx]]]
variable[args] assign[=] call[name[args]][<ast.Slice object at 0x7da1b23448b0>]
with call[name[augment_usage_errors], parameter[name[self]]] begin[:]
with name[ctx] begin[:]
return[call[name[callback], parameter[<ast.Starred object at 0x7da1b2346230>]]] | keyword[def] identifier[invoke] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[self] , identifier[callback] = identifier[args] [: literal[int] ]
identifier[ctx] = identifier[self]
keyword[if] identifier[isinstance] ( identifier[callback] , identifier[Command] ):
identifier[other_cmd] = identifier[callback]
identifier[callback] = identifier[other_cmd] . identifier[callback]
identifier[ctx] = identifier[Context] ( identifier[other_cmd] , identifier[info_name] = identifier[other_cmd] . identifier[name] , identifier[parent] = identifier[self] )
keyword[if] identifier[callback] keyword[is] keyword[None] :
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] )
keyword[for] identifier[param] keyword[in] identifier[other_cmd] . identifier[params] :
keyword[if] identifier[param] . identifier[name] keyword[not] keyword[in] identifier[kwargs] keyword[and] identifier[param] . identifier[expose_value] :
identifier[kwargs] [ identifier[param] . identifier[name] ]= identifier[param] . identifier[get_default] ( identifier[ctx] )
identifier[args] = identifier[args] [ literal[int] :]
keyword[with] identifier[augment_usage_errors] ( identifier[self] ):
keyword[with] identifier[ctx] :
keyword[return] identifier[callback] (* identifier[args] ,** identifier[kwargs] ) | def invoke(*args, **kwargs):
"""Invokes a command callback in exactly the way it expects. There
are two ways to invoke this method:
1. the first argument can be a callback and all other arguments and
keyword arguments are forwarded directly to the function.
2. the first argument is a click command object. In that case all
arguments are forwarded as well but proper click parameters
(options and click arguments) must be keyword arguments and Click
will fill in defaults.
Note that before Click 3.2 keyword arguments were not properly filled
in against the intention of this code and no context was created. For
more information about this change and why it was done in a bugfix
release see :ref:`upgrade-to-3.2`.
"""
(self, callback) = args[:2]
ctx = self
# It's also possible to invoke another command which might or
# might not have a callback. In that case we also fill
# in defaults and make a new context for this command.
if isinstance(callback, Command):
other_cmd = callback
callback = other_cmd.callback
ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
if callback is None:
raise TypeError('The given command does not have a callback that can be invoked.') # depends on [control=['if'], data=[]]
for param in other_cmd.params:
if param.name not in kwargs and param.expose_value:
kwargs[param.name] = param.get_default(ctx) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['param']] # depends on [control=['if'], data=[]]
args = args[2:]
with augment_usage_errors(self):
with ctx:
return callback(*args, **kwargs) # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]] |
def modify_file_in_place(self, fp, length, iso_path, rr_name=None,  # pylint: disable=unused-argument
                         joliet_path=None, udf_path=None):  # pylint: disable=unused-argument
    # type: (BinaryIO, int, str, Optional[str], Optional[str], Optional[str]) -> None
    '''
    An API to modify a file in place on the ISO. This can be extremely fast
    (much faster than calling the write method), but has many restrictions.
    1. The original ISO file pointer must have been opened for reading
       and writing.
    2. Only an existing *file* can be modified; directories cannot be
       changed.
    3. Only an existing file can be *modified*; no new files can be added
       or removed.
    4. The new file contents must use the same number of extents (typically
       2048 bytes) as the old file contents. If using this API to shrink
       a file, this is usually easy since the new contents can be padded
       out with zeros or newlines to meet the requirement. If using this
       API to grow a file, the new contents can only grow up to the next
       extent boundary.
    Unlike all other APIs in PyCdlib, this API actually modifies the
    originally opened on-disk file, so use it with caution.
    Parameters:
     fp - The file object to use for the contents of the new file.
     length - The length of the new data for the file.
     iso_path - The ISO9660 absolute path to the file destination on the ISO.
     rr_name - The Rock Ridge name of the file destination on the ISO.
     joliet_path - The Joliet absolute path to the file destination on the ISO.
     udf_path - The UDF absolute path to the file destination on the ISO.
    Returns:
     Nothing.
    '''
    # NOTE: rr_name/joliet_path/udf_path are accepted for API symmetry but
    # unused here (hence the pylint disables); the file is located via
    # iso_path only.
    if not self._initialized:
        raise pycdlibexception.PyCdlibInvalidInput('This object is not yet initialized; call either open() or new() to create an ISO')
    # The backing file must be writable.  Python's open() normalizes mode
    # strings, so these prefixes cover the r+/rb+/w/a spellings.
    if hasattr(self._cdfp, 'mode') and not self._cdfp.mode.startswith(('r+', 'w', 'a', 'rb+')):
        raise pycdlibexception.PyCdlibInvalidInput('To modify a file in place, the original ISO must have been opened in a write mode (r+, w, or a)')
    log_block_size = self.pvd.logical_block_size()
    child = self._find_iso_record(utils.normpath(iso_path))
    # Restriction 4 above: the replacement data must occupy exactly the
    # same number of logical blocks as the data it replaces.
    old_num_extents = utils.ceiling_div(child.get_data_length(), log_block_size)
    new_num_extents = utils.ceiling_div(length, log_block_size)
    if old_num_extents != new_num_extents:
        raise pycdlibexception.PyCdlibInvalidInput('When modifying a file in-place, the number of extents for a file cannot change!')
    if not child.is_file():
        raise pycdlibexception.PyCdlibInvalidInput('Cannot modify a directory with modify_file_in_place')
    if child.inode is None:
        raise pycdlibexception.PyCdlibInternalError('Child file found without inode')
    child.inode.update_fp(fp, length)
    # Remove the old size from the PVD size
    for pvd in self.pvds:
        pvd.remove_from_space_size(child.get_data_length())
    # And add the new size to the PVD size
    for pvd in self.pvds:
        pvd.add_to_space_size(length)
    if self.enhanced_vd is not None:
        self.enhanced_vd.copy_sizes(self.pvd)
    # If we made it here, we have successfully updated all of the in-memory
    # metadata.  Now we can go and modify the on-disk file.
    self._cdfp.seek(self.pvd.extent_location() * log_block_size)
    # First write out the PVD.
    rec = self.pvd.record()
    self._cdfp.write(rec)
    # Write out the joliet VD
    if self.joliet_vd is not None:
        self._cdfp.seek(self.joliet_vd.extent_location() * log_block_size)
        rec = self.joliet_vd.record()
        self._cdfp.write(rec)
    # Write out the enhanced VD
    if self.enhanced_vd is not None:
        self._cdfp.seek(self.enhanced_vd.extent_location() * log_block_size)
        rec = self.enhanced_vd.record()
        self._cdfp.write(rec)
    # We don't have to write anything out for UDF since it only tracks
    # extents, and we know we aren't changing the number of extents.
    # Write out the actual file contents, zero-padding the final partial
    # block so stale bytes from the old contents cannot leak through.
    self._cdfp.seek(child.extent_location() * log_block_size)
    with inode.InodeOpenData(child.inode, log_block_size) as (data_fp, data_len):
        utils.copy_data(data_len, log_block_size, data_fp, self._cdfp)
        utils.zero_pad(self._cdfp, data_len, log_block_size)
    # Finally write out the directory record entry.
    # This is a little tricky because of what things mean.  First of all,
    # child.extents_to_here represents the total number of extents up to
    # this child in the parent.  Thus, to get the absolute extent offset,
    # we start with the parent's extent location, add on the number of
    # extents to here, and remove 1 (since our offset will be zero-based).
    # Second, child.offset_to_here is the *last* byte that the child uses,
    # so to get the start of it we subtract off the length of the child.
    # Then we can multiple the extent location by the logical block size,
    # add on the offset, and get to the absolute location in the file.
    first_joliet = True
    for record in child.inode.linked_records:
        if isinstance(record, dr.DirectoryRecord):
            # Adjust the Joliet VD space size only once, even if several
            # Joliet directory records link to this inode.
            if self.joliet_vd is not None and id(record.vd) == id(self.joliet_vd) and first_joliet:
                first_joliet = False
                self.joliet_vd.remove_from_space_size(record.get_data_length())
                self.joliet_vd.add_to_space_size(length)
            if record.parent is None:
                raise pycdlibexception.PyCdlibInternalError('Modifying file with empty parent')
            abs_extent_loc = record.parent.extent_location() + record.extents_to_here - 1
            offset = record.offset_to_here - record.dr_len
            abs_offset = abs_extent_loc * log_block_size + offset
        elif isinstance(record, udfmod.UDFFileEntry):
            abs_offset = record.extent_location() * log_block_size
        # NOTE(review): a record that is neither a DirectoryRecord nor a
        # UDFFileEntry would reuse abs_offset from the previous iteration
        # (NameError if it were the first) -- presumably linked_records only
        # ever holds those two types; confirm.
        record.set_data_length(length)
        self._cdfp.seek(abs_offset)
        self._cdfp.write(record.record())
constant[
An API to modify a file in place on the ISO. This can be extremely fast
(much faster than calling the write method), but has many restrictions.
1. The original ISO file pointer must have been opened for reading
and writing.
2. Only an existing *file* can be modified; directories cannot be
changed.
3. Only an existing file can be *modified*; no new files can be added
or removed.
4. The new file contents must use the same number of extents (typically
2048 bytes) as the old file contents. If using this API to shrink
a file, this is usually easy since the new contents can be padded
out with zeros or newlines to meet the requirement. If using this
API to grow a file, the new contents can only grow up to the next
extent boundary.
Unlike all other APIs in PyCdlib, this API actually modifies the
originally opened on-disk file, so use it with caution.
Parameters:
fp - The file object to use for the contents of the new file.
length - The length of the new data for the file.
iso_path - The ISO9660 absolute path to the file destination on the ISO.
rr_name - The Rock Ridge name of the file destination on the ISO.
joliet_path - The Joliet absolute path to the file destination on the ISO.
udf_path - The UDF absolute path to the file destination on the ISO.
Returns:
Nothing.
]
if <ast.UnaryOp object at 0x7da1b0f62b00> begin[:]
<ast.Raise object at 0x7da1b0de13f0>
if <ast.BoolOp object at 0x7da1b0de3250> begin[:]
<ast.Raise object at 0x7da1b0de3160>
variable[log_block_size] assign[=] call[name[self].pvd.logical_block_size, parameter[]]
variable[child] assign[=] call[name[self]._find_iso_record, parameter[call[name[utils].normpath, parameter[name[iso_path]]]]]
variable[old_num_extents] assign[=] call[name[utils].ceiling_div, parameter[call[name[child].get_data_length, parameter[]], name[log_block_size]]]
variable[new_num_extents] assign[=] call[name[utils].ceiling_div, parameter[name[length], name[log_block_size]]]
if compare[name[old_num_extents] not_equal[!=] name[new_num_extents]] begin[:]
<ast.Raise object at 0x7da1b0de2e90>
if <ast.UnaryOp object at 0x7da1b0de3130> begin[:]
<ast.Raise object at 0x7da1b0de17e0>
if compare[name[child].inode is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0de2bf0>
call[name[child].inode.update_fp, parameter[name[fp], name[length]]]
for taget[name[pvd]] in starred[name[self].pvds] begin[:]
call[name[pvd].remove_from_space_size, parameter[call[name[child].get_data_length, parameter[]]]]
for taget[name[pvd]] in starred[name[self].pvds] begin[:]
call[name[pvd].add_to_space_size, parameter[name[length]]]
if compare[name[self].enhanced_vd is_not constant[None]] begin[:]
call[name[self].enhanced_vd.copy_sizes, parameter[name[self].pvd]]
call[name[self]._cdfp.seek, parameter[binary_operation[call[name[self].pvd.extent_location, parameter[]] * name[log_block_size]]]]
variable[rec] assign[=] call[name[self].pvd.record, parameter[]]
call[name[self]._cdfp.write, parameter[name[rec]]]
if compare[name[self].joliet_vd is_not constant[None]] begin[:]
call[name[self]._cdfp.seek, parameter[binary_operation[call[name[self].joliet_vd.extent_location, parameter[]] * name[log_block_size]]]]
variable[rec] assign[=] call[name[self].joliet_vd.record, parameter[]]
call[name[self]._cdfp.write, parameter[name[rec]]]
if compare[name[self].enhanced_vd is_not constant[None]] begin[:]
call[name[self]._cdfp.seek, parameter[binary_operation[call[name[self].enhanced_vd.extent_location, parameter[]] * name[log_block_size]]]]
variable[rec] assign[=] call[name[self].enhanced_vd.record, parameter[]]
call[name[self]._cdfp.write, parameter[name[rec]]]
call[name[self]._cdfp.seek, parameter[binary_operation[call[name[child].extent_location, parameter[]] * name[log_block_size]]]]
with call[name[inode].InodeOpenData, parameter[name[child].inode, name[log_block_size]]] begin[:]
call[name[utils].copy_data, parameter[name[data_len], name[log_block_size], name[data_fp], name[self]._cdfp]]
call[name[utils].zero_pad, parameter[name[self]._cdfp, name[data_len], name[log_block_size]]]
variable[first_joliet] assign[=] constant[True]
for taget[name[record]] in starred[name[child].inode.linked_records] begin[:]
if call[name[isinstance], parameter[name[record], name[dr].DirectoryRecord]] begin[:]
if <ast.BoolOp object at 0x7da1b0d63ca0> begin[:]
variable[first_joliet] assign[=] constant[False]
call[name[self].joliet_vd.remove_from_space_size, parameter[call[name[record].get_data_length, parameter[]]]]
call[name[self].joliet_vd.add_to_space_size, parameter[name[length]]]
if compare[name[record].parent is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0d61cc0>
variable[abs_extent_loc] assign[=] binary_operation[binary_operation[call[name[record].parent.extent_location, parameter[]] + name[record].extents_to_here] - constant[1]]
variable[offset] assign[=] binary_operation[name[record].offset_to_here - name[record].dr_len]
variable[abs_offset] assign[=] binary_operation[binary_operation[name[abs_extent_loc] * name[log_block_size]] + name[offset]]
call[name[record].set_data_length, parameter[name[length]]]
call[name[self]._cdfp.seek, parameter[name[abs_offset]]]
call[name[self]._cdfp.write, parameter[call[name[record].record, parameter[]]]] | keyword[def] identifier[modify_file_in_place] ( identifier[self] , identifier[fp] , identifier[length] , identifier[iso_path] , identifier[rr_name] = keyword[None] ,
identifier[joliet_path] = keyword[None] , identifier[udf_path] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_initialized] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[if] identifier[hasattr] ( identifier[self] . identifier[_cdfp] , literal[string] ) keyword[and] keyword[not] identifier[self] . identifier[_cdfp] . identifier[mode] . identifier[startswith] (( literal[string] , literal[string] , literal[string] , literal[string] )):
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
identifier[log_block_size] = identifier[self] . identifier[pvd] . identifier[logical_block_size] ()
identifier[child] = identifier[self] . identifier[_find_iso_record] ( identifier[utils] . identifier[normpath] ( identifier[iso_path] ))
identifier[old_num_extents] = identifier[utils] . identifier[ceiling_div] ( identifier[child] . identifier[get_data_length] (), identifier[log_block_size] )
identifier[new_num_extents] = identifier[utils] . identifier[ceiling_div] ( identifier[length] , identifier[log_block_size] )
keyword[if] identifier[old_num_extents] != identifier[new_num_extents] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[if] keyword[not] identifier[child] . identifier[is_file] ():
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInvalidInput] ( literal[string] )
keyword[if] identifier[child] . identifier[inode] keyword[is] keyword[None] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] )
identifier[child] . identifier[inode] . identifier[update_fp] ( identifier[fp] , identifier[length] )
keyword[for] identifier[pvd] keyword[in] identifier[self] . identifier[pvds] :
identifier[pvd] . identifier[remove_from_space_size] ( identifier[child] . identifier[get_data_length] ())
keyword[for] identifier[pvd] keyword[in] identifier[self] . identifier[pvds] :
identifier[pvd] . identifier[add_to_space_size] ( identifier[length] )
keyword[if] identifier[self] . identifier[enhanced_vd] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[enhanced_vd] . identifier[copy_sizes] ( identifier[self] . identifier[pvd] )
identifier[self] . identifier[_cdfp] . identifier[seek] ( identifier[self] . identifier[pvd] . identifier[extent_location] ()* identifier[log_block_size] )
identifier[rec] = identifier[self] . identifier[pvd] . identifier[record] ()
identifier[self] . identifier[_cdfp] . identifier[write] ( identifier[rec] )
keyword[if] identifier[self] . identifier[joliet_vd] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_cdfp] . identifier[seek] ( identifier[self] . identifier[joliet_vd] . identifier[extent_location] ()* identifier[log_block_size] )
identifier[rec] = identifier[self] . identifier[joliet_vd] . identifier[record] ()
identifier[self] . identifier[_cdfp] . identifier[write] ( identifier[rec] )
keyword[if] identifier[self] . identifier[enhanced_vd] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_cdfp] . identifier[seek] ( identifier[self] . identifier[enhanced_vd] . identifier[extent_location] ()* identifier[log_block_size] )
identifier[rec] = identifier[self] . identifier[enhanced_vd] . identifier[record] ()
identifier[self] . identifier[_cdfp] . identifier[write] ( identifier[rec] )
identifier[self] . identifier[_cdfp] . identifier[seek] ( identifier[child] . identifier[extent_location] ()* identifier[log_block_size] )
keyword[with] identifier[inode] . identifier[InodeOpenData] ( identifier[child] . identifier[inode] , identifier[log_block_size] ) keyword[as] ( identifier[data_fp] , identifier[data_len] ):
identifier[utils] . identifier[copy_data] ( identifier[data_len] , identifier[log_block_size] , identifier[data_fp] , identifier[self] . identifier[_cdfp] )
identifier[utils] . identifier[zero_pad] ( identifier[self] . identifier[_cdfp] , identifier[data_len] , identifier[log_block_size] )
identifier[first_joliet] = keyword[True]
keyword[for] identifier[record] keyword[in] identifier[child] . identifier[inode] . identifier[linked_records] :
keyword[if] identifier[isinstance] ( identifier[record] , identifier[dr] . identifier[DirectoryRecord] ):
keyword[if] identifier[self] . identifier[joliet_vd] keyword[is] keyword[not] keyword[None] keyword[and] identifier[id] ( identifier[record] . identifier[vd] )== identifier[id] ( identifier[self] . identifier[joliet_vd] ) keyword[and] identifier[first_joliet] :
identifier[first_joliet] = keyword[False]
identifier[self] . identifier[joliet_vd] . identifier[remove_from_space_size] ( identifier[record] . identifier[get_data_length] ())
identifier[self] . identifier[joliet_vd] . identifier[add_to_space_size] ( identifier[length] )
keyword[if] identifier[record] . identifier[parent] keyword[is] keyword[None] :
keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] )
identifier[abs_extent_loc] = identifier[record] . identifier[parent] . identifier[extent_location] ()+ identifier[record] . identifier[extents_to_here] - literal[int]
identifier[offset] = identifier[record] . identifier[offset_to_here] - identifier[record] . identifier[dr_len]
identifier[abs_offset] = identifier[abs_extent_loc] * identifier[log_block_size] + identifier[offset]
keyword[elif] identifier[isinstance] ( identifier[record] , identifier[udfmod] . identifier[UDFFileEntry] ):
identifier[abs_offset] = identifier[record] . identifier[extent_location] ()* identifier[log_block_size]
identifier[record] . identifier[set_data_length] ( identifier[length] )
identifier[self] . identifier[_cdfp] . identifier[seek] ( identifier[abs_offset] )
identifier[self] . identifier[_cdfp] . identifier[write] ( identifier[record] . identifier[record] ()) | def modify_file_in_place(self, fp, length, iso_path, rr_name=None, joliet_path=None, udf_path=None): # pylint: disable=unused-argument
# pylint: disable=unused-argument
# type: (BinaryIO, int, str, Optional[str], Optional[str], Optional[str]) -> None
'\n An API to modify a file in place on the ISO. This can be extremely fast\n (much faster than calling the write method), but has many restrictions.\n\n 1. The original ISO file pointer must have been opened for reading\n and writing.\n 2. Only an existing *file* can be modified; directories cannot be\n changed.\n 3. Only an existing file can be *modified*; no new files can be added\n or removed.\n 4. The new file contents must use the same number of extents (typically\n 2048 bytes) as the old file contents. If using this API to shrink\n a file, this is usually easy since the new contents can be padded\n out with zeros or newlines to meet the requirement. If using this\n API to grow a file, the new contents can only grow up to the next\n extent boundary.\n\n Unlike all other APIs in PyCdlib, this API actually modifies the\n originally opened on-disk file, so use it with caution.\n\n Parameters:\n fp - The file object to use for the contents of the new file.\n length - The length of the new data for the file.\n iso_path - The ISO9660 absolute path to the file destination on the ISO.\n rr_name - The Rock Ridge name of the file destination on the ISO.\n joliet_path - The Joliet absolute path to the file destination on the ISO.\n udf_path - The UDF absolute path to the file destination on the ISO.\n Returns:\n Nothing.\n '
if not self._initialized:
raise pycdlibexception.PyCdlibInvalidInput('This object is not yet initialized; call either open() or new() to create an ISO') # depends on [control=['if'], data=[]]
if hasattr(self._cdfp, 'mode') and (not self._cdfp.mode.startswith(('r+', 'w', 'a', 'rb+'))):
raise pycdlibexception.PyCdlibInvalidInput('To modify a file in place, the original ISO must have been opened in a write mode (r+, w, or a)') # depends on [control=['if'], data=[]]
log_block_size = self.pvd.logical_block_size()
child = self._find_iso_record(utils.normpath(iso_path))
old_num_extents = utils.ceiling_div(child.get_data_length(), log_block_size)
new_num_extents = utils.ceiling_div(length, log_block_size)
if old_num_extents != new_num_extents:
raise pycdlibexception.PyCdlibInvalidInput('When modifying a file in-place, the number of extents for a file cannot change!') # depends on [control=['if'], data=[]]
if not child.is_file():
raise pycdlibexception.PyCdlibInvalidInput('Cannot modify a directory with modify_file_in_place') # depends on [control=['if'], data=[]]
if child.inode is None:
raise pycdlibexception.PyCdlibInternalError('Child file found without inode') # depends on [control=['if'], data=[]]
child.inode.update_fp(fp, length)
# Remove the old size from the PVD size
for pvd in self.pvds:
pvd.remove_from_space_size(child.get_data_length()) # depends on [control=['for'], data=['pvd']]
# And add the new size to the PVD size
for pvd in self.pvds:
pvd.add_to_space_size(length) # depends on [control=['for'], data=['pvd']]
if self.enhanced_vd is not None:
self.enhanced_vd.copy_sizes(self.pvd) # depends on [control=['if'], data=[]]
# If we made it here, we have successfully updated all of the in-memory
# metadata. Now we can go and modify the on-disk file.
self._cdfp.seek(self.pvd.extent_location() * log_block_size)
# First write out the PVD.
rec = self.pvd.record()
self._cdfp.write(rec)
# Write out the joliet VD
if self.joliet_vd is not None:
self._cdfp.seek(self.joliet_vd.extent_location() * log_block_size)
rec = self.joliet_vd.record()
self._cdfp.write(rec) # depends on [control=['if'], data=[]]
# Write out the enhanced VD
if self.enhanced_vd is not None:
self._cdfp.seek(self.enhanced_vd.extent_location() * log_block_size)
rec = self.enhanced_vd.record()
self._cdfp.write(rec) # depends on [control=['if'], data=[]]
# We don't have to write anything out for UDF since it only tracks
# extents, and we know we aren't changing the number of extents.
# Write out the actual file contents
self._cdfp.seek(child.extent_location() * log_block_size)
with inode.InodeOpenData(child.inode, log_block_size) as (data_fp, data_len):
utils.copy_data(data_len, log_block_size, data_fp, self._cdfp)
utils.zero_pad(self._cdfp, data_len, log_block_size) # depends on [control=['with'], data=[]]
# Finally write out the directory record entry.
# This is a little tricky because of what things mean. First of all,
# child.extents_to_here represents the total number of extents up to
# this child in the parent. Thus, to get the absolute extent offset,
# we start with the parent's extent location, add on the number of
# extents to here, and remove 1 (since our offset will be zero-based).
# Second, child.offset_to_here is the *last* byte that the child uses,
# so to get the start of it we subtract off the length of the child.
# Then we can multiple the extent location by the logical block size,
# add on the offset, and get to the absolute location in the file.
first_joliet = True
for record in child.inode.linked_records:
if isinstance(record, dr.DirectoryRecord):
if self.joliet_vd is not None and id(record.vd) == id(self.joliet_vd) and first_joliet:
first_joliet = False
self.joliet_vd.remove_from_space_size(record.get_data_length())
self.joliet_vd.add_to_space_size(length) # depends on [control=['if'], data=[]]
if record.parent is None:
raise pycdlibexception.PyCdlibInternalError('Modifying file with empty parent') # depends on [control=['if'], data=[]]
abs_extent_loc = record.parent.extent_location() + record.extents_to_here - 1
offset = record.offset_to_here - record.dr_len
abs_offset = abs_extent_loc * log_block_size + offset # depends on [control=['if'], data=[]]
elif isinstance(record, udfmod.UDFFileEntry):
abs_offset = record.extent_location() * log_block_size # depends on [control=['if'], data=[]]
record.set_data_length(length)
self._cdfp.seek(abs_offset)
self._cdfp.write(record.record()) # depends on [control=['for'], data=['record']] |
def _to_dict(self):
    """Return a json dictionary representing this model."""
    output = {}
    sentence = getattr(self, 'sentence', None)
    if sentence is not None:
        output['sentence'] = sentence
    # Nested model parts serialize themselves via their own _to_dict().
    for key in ('subject', 'action', 'object'):
        part = getattr(self, key, None)
        if part is not None:
            output[key] = part._to_dict()
    return output
constant[Return a json dictionary representing this model.]
variable[_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da18bcc8d60> begin[:]
call[name[_dict]][constant[sentence]] assign[=] name[self].sentence
if <ast.BoolOp object at 0x7da18bcc9f30> begin[:]
call[name[_dict]][constant[subject]] assign[=] call[name[self].subject._to_dict, parameter[]]
if <ast.BoolOp object at 0x7da18bccbdf0> begin[:]
call[name[_dict]][constant[action]] assign[=] call[name[self].action._to_dict, parameter[]]
if <ast.BoolOp object at 0x7da18bcc9090> begin[:]
call[name[_dict]][constant[object]] assign[=] call[name[self].object._to_dict, parameter[]]
return[name[_dict]] | keyword[def] identifier[_to_dict] ( identifier[self] ):
literal[string]
identifier[_dict] ={}
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[sentence] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[sentence]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[subject] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[subject] . identifier[_to_dict] ()
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[action] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[action] . identifier[_to_dict] ()
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[object] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[object] . identifier[_to_dict] ()
keyword[return] identifier[_dict] | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'sentence') and self.sentence is not None:
_dict['sentence'] = self.sentence # depends on [control=['if'], data=[]]
if hasattr(self, 'subject') and self.subject is not None:
_dict['subject'] = self.subject._to_dict() # depends on [control=['if'], data=[]]
if hasattr(self, 'action') and self.action is not None:
_dict['action'] = self.action._to_dict() # depends on [control=['if'], data=[]]
if hasattr(self, 'object') and self.object is not None:
_dict['object'] = self.object._to_dict() # depends on [control=['if'], data=[]]
return _dict |
def create_object(self, cls: Type[T], additional_kwargs=None) -> T:
    """Create a new instance, satisfying any dependencies on cls."""
    kwargs = additional_kwargs if additional_kwargs else {}
    log.debug('%sCreating %r object with %r', self._log_prefix, cls, kwargs)
    try:
        instance = cls.__new__(cls)
    except TypeError as e:
        # Surface the failing allocator in the error while keeping the
        # traceback short.
        reraise(
            e,
            CallError(cls, getattr(cls.__new__, '__func__', cls.__new__), (), {}, e, self._stack),
            maximum_frames=2,
        )
    try:
        self.call_with_injection(cls.__init__, self_=instance, kwargs=kwargs)
    except TypeError as e:
        # The getattr() fallback is needed because __init__.__func__
        # apparently doesn't exist for Key-type objects.
        bound_init = getattr(instance.__init__, '__func__', instance.__init__)
        reraise(e, CallError(instance, bound_init, (), kwargs, e, self._stack))
    return instance
constant[Create a new instance, satisfying any dependencies on cls.]
variable[additional_kwargs] assign[=] <ast.BoolOp object at 0x7da18eb55240>
call[name[log].debug, parameter[constant[%sCreating %r object with %r], name[self]._log_prefix, name[cls], name[additional_kwargs]]]
<ast.Try object at 0x7da18eb574f0>
<ast.Try object at 0x7da18eb57d00>
return[name[instance]] | keyword[def] identifier[create_object] ( identifier[self] , identifier[cls] : identifier[Type] [ identifier[T] ], identifier[additional_kwargs] = keyword[None] )-> identifier[T] :
literal[string]
identifier[additional_kwargs] = identifier[additional_kwargs] keyword[or] {}
identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[_log_prefix] , identifier[cls] , identifier[additional_kwargs] )
keyword[try] :
identifier[instance] = identifier[cls] . identifier[__new__] ( identifier[cls] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
identifier[reraise] (
identifier[e] ,
identifier[CallError] ( identifier[cls] , identifier[getattr] ( identifier[cls] . identifier[__new__] , literal[string] , identifier[cls] . identifier[__new__] ),(),{}, identifier[e] , identifier[self] . identifier[_stack] ),
identifier[maximum_frames] = literal[int] ,
)
keyword[try] :
identifier[init] = identifier[cls] . identifier[__init__]
identifier[self] . identifier[call_with_injection] ( identifier[init] , identifier[self_] = identifier[instance] , identifier[kwargs] = identifier[additional_kwargs] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
identifier[reraise] (
identifier[e] ,
identifier[CallError] (
identifier[instance] ,
identifier[getattr] ( identifier[instance] . identifier[__init__] , literal[string] , identifier[instance] . identifier[__init__] ),
(),
identifier[additional_kwargs] ,
identifier[e] ,
identifier[self] . identifier[_stack] ,
),
)
keyword[return] identifier[instance] | def create_object(self, cls: Type[T], additional_kwargs=None) -> T:
"""Create a new instance, satisfying any dependencies on cls."""
additional_kwargs = additional_kwargs or {}
log.debug('%sCreating %r object with %r', self._log_prefix, cls, additional_kwargs)
try:
instance = cls.__new__(cls) # depends on [control=['try'], data=[]]
except TypeError as e:
reraise(e, CallError(cls, getattr(cls.__new__, '__func__', cls.__new__), (), {}, e, self._stack), maximum_frames=2) # depends on [control=['except'], data=['e']]
try:
init = cls.__init__
self.call_with_injection(init, self_=instance, kwargs=additional_kwargs) # depends on [control=['try'], data=[]]
except TypeError as e:
# The reason why getattr() fallback is used here is that
# __init__.__func__ apparently doesn't exist for Key-type objects
reraise(e, CallError(instance, getattr(instance.__init__, '__func__', instance.__init__), (), additional_kwargs, e, self._stack)) # depends on [control=['except'], data=['e']]
return instance |
def Geometry(*args, **kwargs):
    """Return an ogr.Geometry instance, optionally created from a geojson
    str or dict. The spatial reference may also be provided.
    """
    # geojson may arrive as a keyword or as the first positional argument.
    arg = kwargs.pop('geojson', None)
    if not arg:
        arg = len(args) and args[0]
    srs = kwargs.pop('srs', None)
    if not srs:
        try:
            srs = arg.srs.wkt
        except AttributeError:
            srs = SpatialReference(4326)
    if hasattr(arg, 'keys'):
        # Mapping: treat as an already-parsed geojson dict.
        geom = ogr.CreateGeometryFromJson(json.dumps(arg))
    elif hasattr(arg, 'startswith'):
        # String-like: WKB bytes, geojson text, or GML markup.
        lead = arg[0] if arg else ' '
        code = lead if isinstance(lead, int) else ord(lead)
        if code in (0, 1):
            # A leading 0/1 marks the WKB byte-order flag.
            geom = ogr.CreateGeometryFromWkb(arg)
        elif arg.startswith('{'):
            geom = ogr.CreateGeometryFromJson(arg)
        elif arg.startswith('<gml'):
            geom = ogr.CreateGeometryFromGML(arg)
        else:
            raise ValueError('Invalid geometry value: %s' % arg)
    elif hasattr(arg, 'wkb'):
        # Another geometry-like object: rebuild from its WKB payload.
        geom = ogr.CreateGeometryFromWkb(bytes(arg.wkb))
    else:
        geom = ogr.Geometry(*args, **kwargs)
    if geom:
        if not isinstance(srs, SpatialReference):
            srs = SpatialReference(srs)
        geom.AssignSpatialReference(srs)
    return geom
constant[Returns an ogr.Geometry instance optionally created from a geojson str
or dict. The spatial reference may also be provided.
]
variable[arg] assign[=] <ast.BoolOp object at 0x7da1b01a4ee0>
<ast.Try object at 0x7da1b01a5660>
if call[name[hasattr], parameter[name[arg], constant[keys]]] begin[:]
variable[geom] assign[=] call[name[ogr].CreateGeometryFromJson, parameter[call[name[json].dumps, parameter[name[arg]]]]]
if name[geom] begin[:]
if <ast.UnaryOp object at 0x7da1b021ffa0> begin[:]
variable[srs] assign[=] call[name[SpatialReference], parameter[name[srs]]]
call[name[geom].AssignSpatialReference, parameter[name[srs]]]
return[name[geom]] | keyword[def] identifier[Geometry] (* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[arg] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[or] identifier[len] ( identifier[args] ) keyword[and] identifier[args] [ literal[int] ]
keyword[try] :
identifier[srs] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[or] identifier[arg] . identifier[srs] . identifier[wkt]
keyword[except] identifier[AttributeError] :
identifier[srs] = identifier[SpatialReference] ( literal[int] )
keyword[if] identifier[hasattr] ( identifier[arg] , literal[string] ):
identifier[geom] = identifier[ogr] . identifier[CreateGeometryFromJson] ( identifier[json] . identifier[dumps] ( identifier[arg] ))
keyword[elif] identifier[hasattr] ( identifier[arg] , literal[string] ):
identifier[char] = identifier[arg] [ literal[int] ] keyword[if] identifier[arg] keyword[else] literal[string]
identifier[i] = identifier[char] keyword[if] identifier[isinstance] ( identifier[char] , identifier[int] ) keyword[else] identifier[ord] ( identifier[char] )
keyword[if] identifier[i] keyword[in] ( literal[int] , literal[int] ):
identifier[geom] = identifier[ogr] . identifier[CreateGeometryFromWkb] ( identifier[arg] )
keyword[elif] identifier[arg] . identifier[startswith] ( literal[string] ):
identifier[geom] = identifier[ogr] . identifier[CreateGeometryFromJson] ( identifier[arg] )
keyword[elif] identifier[arg] . identifier[startswith] ( literal[string] ):
identifier[geom] = identifier[ogr] . identifier[CreateGeometryFromGML] ( identifier[arg] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[arg] )
keyword[elif] identifier[hasattr] ( identifier[arg] , literal[string] ):
identifier[geom] = identifier[ogr] . identifier[CreateGeometryFromWkb] ( identifier[bytes] ( identifier[arg] . identifier[wkb] ))
keyword[else] :
identifier[geom] = identifier[ogr] . identifier[Geometry] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[geom] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[srs] , identifier[SpatialReference] ):
identifier[srs] = identifier[SpatialReference] ( identifier[srs] )
identifier[geom] . identifier[AssignSpatialReference] ( identifier[srs] )
keyword[return] identifier[geom] | def Geometry(*args, **kwargs):
"""Returns an ogr.Geometry instance optionally created from a geojson str
or dict. The spatial reference may also be provided.
"""
# Look for geojson as a positional or keyword arg.
arg = kwargs.pop('geojson', None) or (len(args) and args[0])
try:
srs = kwargs.pop('srs', None) or arg.srs.wkt # depends on [control=['try'], data=[]]
except AttributeError:
srs = SpatialReference(4326) # depends on [control=['except'], data=[]]
if hasattr(arg, 'keys'):
geom = ogr.CreateGeometryFromJson(json.dumps(arg)) # depends on [control=['if'], data=[]]
elif hasattr(arg, 'startswith'):
# WKB as hexadecimal string.
char = arg[0] if arg else ' '
i = char if isinstance(char, int) else ord(char)
if i in (0, 1):
geom = ogr.CreateGeometryFromWkb(arg) # depends on [control=['if'], data=[]]
elif arg.startswith('{'):
geom = ogr.CreateGeometryFromJson(arg) # depends on [control=['if'], data=[]]
elif arg.startswith('<gml'):
geom = ogr.CreateGeometryFromGML(arg) # depends on [control=['if'], data=[]]
else:
raise ValueError('Invalid geometry value: %s' % arg) # depends on [control=['if'], data=[]]
elif hasattr(arg, 'wkb'):
geom = ogr.CreateGeometryFromWkb(bytes(arg.wkb)) # depends on [control=['if'], data=[]]
else:
geom = ogr.Geometry(*args, **kwargs)
if geom:
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs) # depends on [control=['if'], data=[]]
geom.AssignSpatialReference(srs) # depends on [control=['if'], data=[]]
return geom |
def parse_log_messages(self, text):
"""Will parse git log messages in the 'short' format"""
regex = r"commit ([0-9a-f]+)\nAuthor: (.*?)\n\n(.*?)(?:\n\n|$)"
messages = re.findall(regex, text, re.DOTALL)
parsed = []
for commit, author, message in messages:
parsed.append((
commit[:10],
re.sub(r"\s*<.*?>", "", author), # Remove email address if present
message.strip()
))
return parsed | def function[parse_log_messages, parameter[self, text]]:
constant[Will parse git log messages in the 'short' format]
variable[regex] assign[=] constant[commit ([0-9a-f]+)\nAuthor: (.*?)\n\n(.*?)(?:\n\n|$)]
variable[messages] assign[=] call[name[re].findall, parameter[name[regex], name[text], name[re].DOTALL]]
variable[parsed] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1aff1e680>, <ast.Name object at 0x7da1aff1f2b0>, <ast.Name object at 0x7da1aff1f010>]]] in starred[name[messages]] begin[:]
call[name[parsed].append, parameter[tuple[[<ast.Subscript object at 0x7da1aff1f040>, <ast.Call object at 0x7da1aff1e020>, <ast.Call object at 0x7da1aff1dcc0>]]]]
return[name[parsed]] | keyword[def] identifier[parse_log_messages] ( identifier[self] , identifier[text] ):
literal[string]
identifier[regex] = literal[string]
identifier[messages] = identifier[re] . identifier[findall] ( identifier[regex] , identifier[text] , identifier[re] . identifier[DOTALL] )
identifier[parsed] =[]
keyword[for] identifier[commit] , identifier[author] , identifier[message] keyword[in] identifier[messages] :
identifier[parsed] . identifier[append] ((
identifier[commit] [: literal[int] ],
identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[author] ),
identifier[message] . identifier[strip] ()
))
keyword[return] identifier[parsed] | def parse_log_messages(self, text):
"""Will parse git log messages in the 'short' format"""
regex = 'commit ([0-9a-f]+)\\nAuthor: (.*?)\\n\\n(.*?)(?:\\n\\n|$)'
messages = re.findall(regex, text, re.DOTALL)
parsed = []
for (commit, author, message) in messages: # Remove email address if present
parsed.append((commit[:10], re.sub('\\s*<.*?>', '', author), message.strip())) # depends on [control=['for'], data=[]]
return parsed |
def save_pid_eeprom(self):
""" saves the PID values from RAM to EEPROM
"""
pval = self.get_position_p()
ival = self.get_position_i()
dval = self.get_position_d()
#write P value
pvalue_msb = int(pval) >> 8
pvalue_lsb = int(pval) & 0xff
data_p = []
data_p.append(0x0B)
data_p.append(self.servoid)
data_p.append(EEP_WRITE_REQ)
data_p.append(POSITION_KP_EEP)
data_p.append(BYTE2)
data_p.append( pvalue_lsb)
data_p.append( pvalue_msb)
send_data(data_p)
# write I value
ivalue_msb = int(ival) >> 8
ivalue_lsb = int(ival) & 0xff
data_i = []
data_i.append(0x0B)
data_i.append(self.servoid)
data_i.append(EEP_WRITE_REQ)
data_i.append(POSITION_KI_EEP)
data_i.append(BYTE2)
data_i.append( ivalue_lsb)
data_i.append( ivalue_msb)
send_data(data_i)
# write D value
dvalue_msb = int(dval) >> 8
dvalue_lsb = int(dval) & 0xff
data_d = []
data_d.append(0x0B)
data_d.append(self.servoid)
data_d.append(EEP_WRITE_REQ)
data_d.append(POSITION_KD_EEP)
data_d.append(BYTE2)
data_d.append( dvalue_lsb)
data_d.append( dvalue_msb)
send_data(data_d) | def function[save_pid_eeprom, parameter[self]]:
constant[ saves the PID values from RAM to EEPROM
]
variable[pval] assign[=] call[name[self].get_position_p, parameter[]]
variable[ival] assign[=] call[name[self].get_position_i, parameter[]]
variable[dval] assign[=] call[name[self].get_position_d, parameter[]]
variable[pvalue_msb] assign[=] binary_operation[call[name[int], parameter[name[pval]]] <ast.RShift object at 0x7da2590d6a40> constant[8]]
variable[pvalue_lsb] assign[=] binary_operation[call[name[int], parameter[name[pval]]] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]
variable[data_p] assign[=] list[[]]
call[name[data_p].append, parameter[constant[11]]]
call[name[data_p].append, parameter[name[self].servoid]]
call[name[data_p].append, parameter[name[EEP_WRITE_REQ]]]
call[name[data_p].append, parameter[name[POSITION_KP_EEP]]]
call[name[data_p].append, parameter[name[BYTE2]]]
call[name[data_p].append, parameter[name[pvalue_lsb]]]
call[name[data_p].append, parameter[name[pvalue_msb]]]
call[name[send_data], parameter[name[data_p]]]
variable[ivalue_msb] assign[=] binary_operation[call[name[int], parameter[name[ival]]] <ast.RShift object at 0x7da2590d6a40> constant[8]]
variable[ivalue_lsb] assign[=] binary_operation[call[name[int], parameter[name[ival]]] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]
variable[data_i] assign[=] list[[]]
call[name[data_i].append, parameter[constant[11]]]
call[name[data_i].append, parameter[name[self].servoid]]
call[name[data_i].append, parameter[name[EEP_WRITE_REQ]]]
call[name[data_i].append, parameter[name[POSITION_KI_EEP]]]
call[name[data_i].append, parameter[name[BYTE2]]]
call[name[data_i].append, parameter[name[ivalue_lsb]]]
call[name[data_i].append, parameter[name[ivalue_msb]]]
call[name[send_data], parameter[name[data_i]]]
variable[dvalue_msb] assign[=] binary_operation[call[name[int], parameter[name[dval]]] <ast.RShift object at 0x7da2590d6a40> constant[8]]
variable[dvalue_lsb] assign[=] binary_operation[call[name[int], parameter[name[dval]]] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]
variable[data_d] assign[=] list[[]]
call[name[data_d].append, parameter[constant[11]]]
call[name[data_d].append, parameter[name[self].servoid]]
call[name[data_d].append, parameter[name[EEP_WRITE_REQ]]]
call[name[data_d].append, parameter[name[POSITION_KD_EEP]]]
call[name[data_d].append, parameter[name[BYTE2]]]
call[name[data_d].append, parameter[name[dvalue_lsb]]]
call[name[data_d].append, parameter[name[dvalue_msb]]]
call[name[send_data], parameter[name[data_d]]] | keyword[def] identifier[save_pid_eeprom] ( identifier[self] ):
literal[string]
identifier[pval] = identifier[self] . identifier[get_position_p] ()
identifier[ival] = identifier[self] . identifier[get_position_i] ()
identifier[dval] = identifier[self] . identifier[get_position_d] ()
identifier[pvalue_msb] = identifier[int] ( identifier[pval] )>> literal[int]
identifier[pvalue_lsb] = identifier[int] ( identifier[pval] )& literal[int]
identifier[data_p] =[]
identifier[data_p] . identifier[append] ( literal[int] )
identifier[data_p] . identifier[append] ( identifier[self] . identifier[servoid] )
identifier[data_p] . identifier[append] ( identifier[EEP_WRITE_REQ] )
identifier[data_p] . identifier[append] ( identifier[POSITION_KP_EEP] )
identifier[data_p] . identifier[append] ( identifier[BYTE2] )
identifier[data_p] . identifier[append] ( identifier[pvalue_lsb] )
identifier[data_p] . identifier[append] ( identifier[pvalue_msb] )
identifier[send_data] ( identifier[data_p] )
identifier[ivalue_msb] = identifier[int] ( identifier[ival] )>> literal[int]
identifier[ivalue_lsb] = identifier[int] ( identifier[ival] )& literal[int]
identifier[data_i] =[]
identifier[data_i] . identifier[append] ( literal[int] )
identifier[data_i] . identifier[append] ( identifier[self] . identifier[servoid] )
identifier[data_i] . identifier[append] ( identifier[EEP_WRITE_REQ] )
identifier[data_i] . identifier[append] ( identifier[POSITION_KI_EEP] )
identifier[data_i] . identifier[append] ( identifier[BYTE2] )
identifier[data_i] . identifier[append] ( identifier[ivalue_lsb] )
identifier[data_i] . identifier[append] ( identifier[ivalue_msb] )
identifier[send_data] ( identifier[data_i] )
identifier[dvalue_msb] = identifier[int] ( identifier[dval] )>> literal[int]
identifier[dvalue_lsb] = identifier[int] ( identifier[dval] )& literal[int]
identifier[data_d] =[]
identifier[data_d] . identifier[append] ( literal[int] )
identifier[data_d] . identifier[append] ( identifier[self] . identifier[servoid] )
identifier[data_d] . identifier[append] ( identifier[EEP_WRITE_REQ] )
identifier[data_d] . identifier[append] ( identifier[POSITION_KD_EEP] )
identifier[data_d] . identifier[append] ( identifier[BYTE2] )
identifier[data_d] . identifier[append] ( identifier[dvalue_lsb] )
identifier[data_d] . identifier[append] ( identifier[dvalue_msb] )
identifier[send_data] ( identifier[data_d] ) | def save_pid_eeprom(self):
""" saves the PID values from RAM to EEPROM
"""
pval = self.get_position_p()
ival = self.get_position_i()
dval = self.get_position_d()
#write P value
pvalue_msb = int(pval) >> 8
pvalue_lsb = int(pval) & 255
data_p = []
data_p.append(11)
data_p.append(self.servoid)
data_p.append(EEP_WRITE_REQ)
data_p.append(POSITION_KP_EEP)
data_p.append(BYTE2)
data_p.append(pvalue_lsb)
data_p.append(pvalue_msb)
send_data(data_p)
# write I value
ivalue_msb = int(ival) >> 8
ivalue_lsb = int(ival) & 255
data_i = []
data_i.append(11)
data_i.append(self.servoid)
data_i.append(EEP_WRITE_REQ)
data_i.append(POSITION_KI_EEP)
data_i.append(BYTE2)
data_i.append(ivalue_lsb)
data_i.append(ivalue_msb)
send_data(data_i)
# write D value
dvalue_msb = int(dval) >> 8
dvalue_lsb = int(dval) & 255
data_d = []
data_d.append(11)
data_d.append(self.servoid)
data_d.append(EEP_WRITE_REQ)
data_d.append(POSITION_KD_EEP)
data_d.append(BYTE2)
data_d.append(dvalue_lsb)
data_d.append(dvalue_msb)
send_data(data_d) |
def tag_and_stem(text):
"""
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
"""
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for token, tag in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token))
return out | def function[tag_and_stem, parameter[text]]:
constant[
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
]
variable[tokens] assign[=] call[name[tokenize], parameter[name[text]]]
variable[tagged] assign[=] call[name[nltk].pos_tag, parameter[name[tokens]]]
variable[out] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b0fdf7f0>, <ast.Name object at 0x7da1b0fdc970>]]] in starred[name[tagged]] begin[:]
variable[stem] assign[=] call[name[morphy_stem], parameter[name[token], name[tag]]]
call[name[out].append, parameter[tuple[[<ast.Name object at 0x7da2044c1960>, <ast.Name object at 0x7da2044c3f70>, <ast.Name object at 0x7da2044c2890>]]]]
return[name[out]] | keyword[def] identifier[tag_and_stem] ( identifier[text] ):
literal[string]
identifier[tokens] = identifier[tokenize] ( identifier[text] )
identifier[tagged] = identifier[nltk] . identifier[pos_tag] ( identifier[tokens] )
identifier[out] =[]
keyword[for] identifier[token] , identifier[tag] keyword[in] identifier[tagged] :
identifier[stem] = identifier[morphy_stem] ( identifier[token] , identifier[tag] )
identifier[out] . identifier[append] (( identifier[stem] , identifier[tag] , identifier[token] ))
keyword[return] identifier[out] | def tag_and_stem(text):
"""
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
"""
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for (token, tag) in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token)) # depends on [control=['for'], data=[]]
return out |
def _setup_direct_converter(self, converter):
'''
Given a converter, set up the direct_output routes for conversions,
which is used for transcoding between similar datatypes.
'''
inputs = (
converter.direct_inputs
if hasattr(converter, 'direct_inputs')
else converter.inputs
)
for in_ in inputs:
for out in converter.direct_outputs:
self.direct_converters[(in_, out)] = converter | def function[_setup_direct_converter, parameter[self, converter]]:
constant[
Given a converter, set up the direct_output routes for conversions,
which is used for transcoding between similar datatypes.
]
variable[inputs] assign[=] <ast.IfExp object at 0x7da1b0b83970>
for taget[name[in_]] in starred[name[inputs]] begin[:]
for taget[name[out]] in starred[name[converter].direct_outputs] begin[:]
call[name[self].direct_converters][tuple[[<ast.Name object at 0x7da1b0b80eb0>, <ast.Name object at 0x7da1b0ba9870>]]] assign[=] name[converter] | keyword[def] identifier[_setup_direct_converter] ( identifier[self] , identifier[converter] ):
literal[string]
identifier[inputs] =(
identifier[converter] . identifier[direct_inputs]
keyword[if] identifier[hasattr] ( identifier[converter] , literal[string] )
keyword[else] identifier[converter] . identifier[inputs]
)
keyword[for] identifier[in_] keyword[in] identifier[inputs] :
keyword[for] identifier[out] keyword[in] identifier[converter] . identifier[direct_outputs] :
identifier[self] . identifier[direct_converters] [( identifier[in_] , identifier[out] )]= identifier[converter] | def _setup_direct_converter(self, converter):
"""
Given a converter, set up the direct_output routes for conversions,
which is used for transcoding between similar datatypes.
"""
inputs = converter.direct_inputs if hasattr(converter, 'direct_inputs') else converter.inputs
for in_ in inputs:
for out in converter.direct_outputs:
self.direct_converters[in_, out] = converter # depends on [control=['for'], data=['out']] # depends on [control=['for'], data=['in_']] |
def _format_obj(self, item=None):
""" Determines the type of the object and maps it to the correct
formatter
"""
# Order here matters, odd behavior with tuples
if item is None:
return getattr(self, 'number')(item)
elif isinstance(item, self.str_):
#: String
return item + " "
elif isinstance(item, bytes):
#: Bytes
return getattr(self, 'bytes')(item)
elif isinstance(item, self.numeric_):
#: Float, int, etc.
return getattr(self, 'number')(item)
elif isinstance(item, self.dict_):
#: Dict
return getattr(self, 'dict')(item)
elif isinstance(item, self.list_):
#: List
return getattr(self, 'list')(item)
elif isinstance(item, tuple):
#: Tuple
return getattr(self, 'tuple')(item)
elif isinstance(item, types.GeneratorType):
#: Generator
return getattr(self, 'generator')(item)
elif isinstance(item, self.set_):
#: Set
return getattr(self, 'set')(item)
elif isinstance(item, deque):
#: Deque
return getattr(self, 'deque')(item)
elif isinstance(item, Sequence):
#: Sequence
return getattr(self, 'sequence')(item)
#: Any other object
return getattr(self, 'object')(item) | def function[_format_obj, parameter[self, item]]:
constant[ Determines the type of the object and maps it to the correct
formatter
]
if compare[name[item] is constant[None]] begin[:]
return[call[call[name[getattr], parameter[name[self], constant[number]]], parameter[name[item]]]]
return[call[call[name[getattr], parameter[name[self], constant[object]]], parameter[name[item]]]] | keyword[def] identifier[_format_obj] ( identifier[self] , identifier[item] = keyword[None] ):
literal[string]
keyword[if] identifier[item] keyword[is] keyword[None] :
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[self] . identifier[str_] ):
keyword[return] identifier[item] + literal[string]
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[bytes] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[self] . identifier[numeric_] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[self] . identifier[dict_] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[self] . identifier[list_] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[tuple] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[types] . identifier[GeneratorType] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[self] . identifier[set_] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[deque] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[elif] identifier[isinstance] ( identifier[item] , identifier[Sequence] ):
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] )
keyword[return] identifier[getattr] ( identifier[self] , literal[string] )( identifier[item] ) | def _format_obj(self, item=None):
""" Determines the type of the object and maps it to the correct
formatter
"""
# Order here matters, odd behavior with tuples
if item is None:
return getattr(self, 'number')(item) # depends on [control=['if'], data=['item']]
elif isinstance(item, self.str_):
#: String
return item + ' ' # depends on [control=['if'], data=[]]
elif isinstance(item, bytes):
#: Bytes
return getattr(self, 'bytes')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, self.numeric_):
#: Float, int, etc.
return getattr(self, 'number')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, self.dict_):
#: Dict
return getattr(self, 'dict')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, self.list_):
#: List
return getattr(self, 'list')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, tuple):
#: Tuple
return getattr(self, 'tuple')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, types.GeneratorType):
#: Generator
return getattr(self, 'generator')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, self.set_):
#: Set
return getattr(self, 'set')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, deque):
#: Deque
return getattr(self, 'deque')(item) # depends on [control=['if'], data=[]]
elif isinstance(item, Sequence):
#: Sequence
return getattr(self, 'sequence')(item) # depends on [control=['if'], data=[]]
#: Any other object
return getattr(self, 'object')(item) |
def to_bytes_36(self, previous: bytes):
"""
A to-bytes specific to Python 3.6 and above.
"""
# Calculations ahead.
bc = b""
# Calculate the length of the iterator.
it_bc = util.generate_bytecode_from_obb(self.iterator, previous)
bc += it_bc
bc += util.ensure_instruction(tokens.GET_ITER) | def function[to_bytes_36, parameter[self, previous]]:
constant[
A to-bytes specific to Python 3.6 and above.
]
variable[bc] assign[=] constant[b'']
variable[it_bc] assign[=] call[name[util].generate_bytecode_from_obb, parameter[name[self].iterator, name[previous]]]
<ast.AugAssign object at 0x7da1aff1d4b0>
<ast.AugAssign object at 0x7da1aff1d960> | keyword[def] identifier[to_bytes_36] ( identifier[self] , identifier[previous] : identifier[bytes] ):
literal[string]
identifier[bc] = literal[string]
identifier[it_bc] = identifier[util] . identifier[generate_bytecode_from_obb] ( identifier[self] . identifier[iterator] , identifier[previous] )
identifier[bc] += identifier[it_bc]
identifier[bc] += identifier[util] . identifier[ensure_instruction] ( identifier[tokens] . identifier[GET_ITER] ) | def to_bytes_36(self, previous: bytes):
"""
A to-bytes specific to Python 3.6 and above.
"""
# Calculations ahead.
bc = b''
# Calculate the length of the iterator.
it_bc = util.generate_bytecode_from_obb(self.iterator, previous)
bc += it_bc
bc += util.ensure_instruction(tokens.GET_ITER) |
def decode_abi(self, types: Iterable[TypeStr], data: Decodable) -> Tuple[Any, ...]:
"""
Decodes the binary value ``data`` as a sequence of values of the ABI types
in ``types`` via the head-tail mechanism into a tuple of equivalent python
values.
:param types: An iterable of string representations of the ABI types that
will be used for decoding e.g. ``('uint256', 'bytes[]', '(int,int)')``
:param data: The binary value to be decoded.
:returns: A tuple of equivalent python values for the ABI values
represented in ``data``.
"""
if not is_bytes(data):
raise TypeError("The `data` value must be of bytes type. Got {0}".format(type(data)))
decoders = [
self._registry.get_decoder(type_str)
for type_str in types
]
decoder = TupleDecoder(decoders=decoders)
stream = ContextFramesBytesIO(data)
return decoder(stream) | def function[decode_abi, parameter[self, types, data]]:
constant[
Decodes the binary value ``data`` as a sequence of values of the ABI types
in ``types`` via the head-tail mechanism into a tuple of equivalent python
values.
:param types: An iterable of string representations of the ABI types that
will be used for decoding e.g. ``('uint256', 'bytes[]', '(int,int)')``
:param data: The binary value to be decoded.
:returns: A tuple of equivalent python values for the ABI values
represented in ``data``.
]
if <ast.UnaryOp object at 0x7da18bccbca0> begin[:]
<ast.Raise object at 0x7da18bcc84f0>
variable[decoders] assign[=] <ast.ListComp object at 0x7da20e9b33d0>
variable[decoder] assign[=] call[name[TupleDecoder], parameter[]]
variable[stream] assign[=] call[name[ContextFramesBytesIO], parameter[name[data]]]
return[call[name[decoder], parameter[name[stream]]]] | keyword[def] identifier[decode_abi] ( identifier[self] , identifier[types] : identifier[Iterable] [ identifier[TypeStr] ], identifier[data] : identifier[Decodable] )-> identifier[Tuple] [ identifier[Any] ,...]:
literal[string]
keyword[if] keyword[not] identifier[is_bytes] ( identifier[data] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[data] )))
identifier[decoders] =[
identifier[self] . identifier[_registry] . identifier[get_decoder] ( identifier[type_str] )
keyword[for] identifier[type_str] keyword[in] identifier[types]
]
identifier[decoder] = identifier[TupleDecoder] ( identifier[decoders] = identifier[decoders] )
identifier[stream] = identifier[ContextFramesBytesIO] ( identifier[data] )
keyword[return] identifier[decoder] ( identifier[stream] ) | def decode_abi(self, types: Iterable[TypeStr], data: Decodable) -> Tuple[Any, ...]:
"""
Decodes the binary value ``data`` as a sequence of values of the ABI types
in ``types`` via the head-tail mechanism into a tuple of equivalent python
values.
:param types: An iterable of string representations of the ABI types that
will be used for decoding e.g. ``('uint256', 'bytes[]', '(int,int)')``
:param data: The binary value to be decoded.
:returns: A tuple of equivalent python values for the ABI values
represented in ``data``.
"""
if not is_bytes(data):
raise TypeError('The `data` value must be of bytes type. Got {0}'.format(type(data))) # depends on [control=['if'], data=[]]
decoders = [self._registry.get_decoder(type_str) for type_str in types]
decoder = TupleDecoder(decoders=decoders)
stream = ContextFramesBytesIO(data)
return decoder(stream) |
def remove_environment(environment_var_name, system=False):
"""
Remove the specified environment setting from the appropriate config file.
:param environment_var_name: The name of the environment setting to remove.
:keyword system: Set to True to modify the system configuration file.
If not set, the user config file will be modified.
"""
config_filename = \
_SYSTEM_CONFIG_FILE if system is True else _USER_CONFIG_FILE
config = _read_config(config_filename)
section = _ENVIRONMENT_SECTION_NAME
config.remove_option(section, environment_var_name)
_write_config(config, config_filename) | def function[remove_environment, parameter[environment_var_name, system]]:
constant[
Remove the specified environment setting from the appropriate config file.
:param environment_var_name: The name of the environment setting to remove.
:keyword system: Set to True to modify the system configuration file.
If not set, the user config file will be modified.
]
variable[config_filename] assign[=] <ast.IfExp object at 0x7da1b256e050>
variable[config] assign[=] call[name[_read_config], parameter[name[config_filename]]]
variable[section] assign[=] name[_ENVIRONMENT_SECTION_NAME]
call[name[config].remove_option, parameter[name[section], name[environment_var_name]]]
call[name[_write_config], parameter[name[config], name[config_filename]]] | keyword[def] identifier[remove_environment] ( identifier[environment_var_name] , identifier[system] = keyword[False] ):
literal[string]
identifier[config_filename] = identifier[_SYSTEM_CONFIG_FILE] keyword[if] identifier[system] keyword[is] keyword[True] keyword[else] identifier[_USER_CONFIG_FILE]
identifier[config] = identifier[_read_config] ( identifier[config_filename] )
identifier[section] = identifier[_ENVIRONMENT_SECTION_NAME]
identifier[config] . identifier[remove_option] ( identifier[section] , identifier[environment_var_name] )
identifier[_write_config] ( identifier[config] , identifier[config_filename] ) | def remove_environment(environment_var_name, system=False):
"""
Remove the specified environment setting from the appropriate config file.
:param environment_var_name: The name of the environment setting to remove.
:keyword system: Set to True to modify the system configuration file.
If not set, the user config file will be modified.
"""
config_filename = _SYSTEM_CONFIG_FILE if system is True else _USER_CONFIG_FILE
config = _read_config(config_filename)
section = _ENVIRONMENT_SECTION_NAME
config.remove_option(section, environment_var_name)
_write_config(config, config_filename) |
def create_cfg_segment(filename, filecontent, description, auth, url):
'''
Takes a str into var filecontent which represents the entire content of a configuration segment, or partial
configuration file. Takes a str into var description which represents the description of the configuration segment
:param filename: str containing the name of the configuration segment.
:param filecontent: str containing the entire contents of the configuration segment
:param description: str contrianing the description of the configuration segment
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: If successful, Boolena of type True
:rtype: Boolean
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.icc import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> filecontent = ("""sample file content""")
>>> create_new_file = create_cfg_segment('CW7SNMP.cfg', filecontent, 'My New Template', auth.creds, auth.url)
>>> template_id = get_template_id('CW7SNMP.cfg', auth.creds, auth.url)
>>> assert type(template_id) is str
>>>
'''
payload = {"confFileName": filename,
"confFileType": "2",
"cfgFileParent": "-1",
"confFileDesc": description,
"content": filecontent}
create_cfg_segment_url = "/imcrs/icc/confFile"
f_url = url + create_cfg_segment_url
# creates the URL using the payload variable as the contents
r = requests.post(f_url,data= (json.dumps(payload)), auth=auth, headers=HEADERS)
try:
if r.status_code == 201:
return True
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + " create_cfg_segment: An Error has occured" | def function[create_cfg_segment, parameter[filename, filecontent, description, auth, url]]:
constant[
Takes a str into var filecontent which represents the entire content of a configuration segment, or partial
configuration file. Takes a str into var description which represents the description of the configuration segment
:param filename: str containing the name of the configuration segment.
:param filecontent: str containing the entire contents of the configuration segment
:param description: str contrianing the description of the configuration segment
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: If successful, Boolena of type True
:rtype: Boolean
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.icc import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> filecontent = ("""sample file content""")
>>> create_new_file = create_cfg_segment('CW7SNMP.cfg', filecontent, 'My New Template', auth.creds, auth.url)
>>> template_id = get_template_id('CW7SNMP.cfg', auth.creds, auth.url)
>>> assert type(template_id) is str
>>>
]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da20c794a60>, <ast.Constant object at 0x7da20c795ff0>, <ast.Constant object at 0x7da20c796410>, <ast.Constant object at 0x7da20c795fc0>, <ast.Constant object at 0x7da20c795180>], [<ast.Name object at 0x7da20c796230>, <ast.Constant object at 0x7da20c794fa0>, <ast.Constant object at 0x7da20c795b10>, <ast.Name object at 0x7da20c7953c0>, <ast.Name object at 0x7da20c796260>]]
variable[create_cfg_segment_url] assign[=] constant[/imcrs/icc/confFile]
variable[f_url] assign[=] binary_operation[name[url] + name[create_cfg_segment_url]]
variable[r] assign[=] call[name[requests].post, parameter[name[f_url]]]
<ast.Try object at 0x7da20c795e40> | keyword[def] identifier[create_cfg_segment] ( identifier[filename] , identifier[filecontent] , identifier[description] , identifier[auth] , identifier[url] ):
literal[string]
identifier[payload] ={ literal[string] : identifier[filename] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[description] ,
literal[string] : identifier[filecontent] }
identifier[create_cfg_segment_url] = literal[string]
identifier[f_url] = identifier[url] + identifier[create_cfg_segment_url]
identifier[r] = identifier[requests] . identifier[post] ( identifier[f_url] , identifier[data] =( identifier[json] . identifier[dumps] ( identifier[payload] )), identifier[auth] = identifier[auth] , identifier[headers] = identifier[HEADERS] )
keyword[try] :
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[return] keyword[True]
keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[e] :
keyword[return] literal[string] + identifier[str] ( identifier[e] )+ literal[string] | def create_cfg_segment(filename, filecontent, description, auth, url):
'''
Takes a str into var filecontent which represents the entire content of a configuration segment, or partial
configuration file. Takes a str into var description which represents the description of the configuration segment
:param filename: str containing the name of the configuration segment.
:param filecontent: str containing the entire contents of the configuration segment
:param description: str contrianing the description of the configuration segment
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: If successful, Boolena of type True
:rtype: Boolean
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.icc import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> filecontent = ("""sample file content""")
>>> create_new_file = create_cfg_segment('CW7SNMP.cfg', filecontent, 'My New Template', auth.creds, auth.url)
>>> template_id = get_template_id('CW7SNMP.cfg', auth.creds, auth.url)
>>> assert type(template_id) is str
>>>
'''
payload = {'confFileName': filename, 'confFileType': '2', 'cfgFileParent': '-1', 'confFileDesc': description, 'content': filecontent}
create_cfg_segment_url = '/imcrs/icc/confFile'
f_url = url + create_cfg_segment_url
# creates the URL using the payload variable as the contents
r = requests.post(f_url, data=json.dumps(payload), auth=auth, headers=HEADERS)
try:
if r.status_code == 201:
return True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except requests.exceptions.RequestException as e:
return 'Error:\n' + str(e) + ' create_cfg_segment: An Error has occured' # depends on [control=['except'], data=['e']] |
def stats(self, details=False):
"""Get statistics and information from the daemon
Returns an object with the daemon identity, the daemon start_time
and some extra properties depending upon the daemon type.
All daemons provide these ones:
- program_start: the Alignak start timestamp
- spare: to indicate if the daemon is a spare one
- load: the daemon load
- modules: the daemon modules information
- counters: the specific daemon counters
:param details: Details are required (different from 0)
:type details str
:return: daemon stats
:rtype: dict
"""
if details is not False:
details = bool(details)
res = self.identity()
res.update(self.app.get_daemon_stats(details=details))
return res | def function[stats, parameter[self, details]]:
constant[Get statistics and information from the daemon
Returns an object with the daemon identity, the daemon start_time
and some extra properties depending upon the daemon type.
All daemons provide these ones:
- program_start: the Alignak start timestamp
- spare: to indicate if the daemon is a spare one
- load: the daemon load
- modules: the daemon modules information
- counters: the specific daemon counters
:param details: Details are required (different from 0)
:type details str
:return: daemon stats
:rtype: dict
]
if compare[name[details] is_not constant[False]] begin[:]
variable[details] assign[=] call[name[bool], parameter[name[details]]]
variable[res] assign[=] call[name[self].identity, parameter[]]
call[name[res].update, parameter[call[name[self].app.get_daemon_stats, parameter[]]]]
return[name[res]] | keyword[def] identifier[stats] ( identifier[self] , identifier[details] = keyword[False] ):
literal[string]
keyword[if] identifier[details] keyword[is] keyword[not] keyword[False] :
identifier[details] = identifier[bool] ( identifier[details] )
identifier[res] = identifier[self] . identifier[identity] ()
identifier[res] . identifier[update] ( identifier[self] . identifier[app] . identifier[get_daemon_stats] ( identifier[details] = identifier[details] ))
keyword[return] identifier[res] | def stats(self, details=False):
"""Get statistics and information from the daemon
Returns an object with the daemon identity, the daemon start_time
and some extra properties depending upon the daemon type.
All daemons provide these ones:
- program_start: the Alignak start timestamp
- spare: to indicate if the daemon is a spare one
- load: the daemon load
- modules: the daemon modules information
- counters: the specific daemon counters
:param details: Details are required (different from 0)
:type details str
:return: daemon stats
:rtype: dict
"""
if details is not False:
details = bool(details) # depends on [control=['if'], data=['details']]
res = self.identity()
res.update(self.app.get_daemon_stats(details=details))
return res |
def create_storage_class(self, body, **kwargs): # noqa: E501
"""create_storage_class # noqa: E501
create a StorageClass # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_storage_class(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1StorageClass body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1StorageClass
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_storage_class_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_storage_class_with_http_info(body, **kwargs) # noqa: E501
return data | def function[create_storage_class, parameter[self, body]]:
constant[create_storage_class # noqa: E501
create a StorageClass # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_storage_class(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1StorageClass body: (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1StorageClass
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].create_storage_class_with_http_info, parameter[name[body]]]] | keyword[def] identifier[create_storage_class] ( identifier[self] , identifier[body] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[create_storage_class_with_http_info] ( identifier[body] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[create_storage_class_with_http_info] ( identifier[body] ,** identifier[kwargs] )
keyword[return] identifier[data] | def create_storage_class(self, body, **kwargs): # noqa: E501
"create_storage_class # noqa: E501\n\n create a StorageClass # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.create_storage_class(body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param V1StorageClass body: (required)\n :param bool include_uninitialized: If true, partially initialized resources are included in the response.\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1StorageClass\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_storage_class_with_http_info(body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.create_storage_class_with_http_info(body, **kwargs) # noqa: E501
return data |
def Operation(self, x, y):
"""Whether x is fully contained in y."""
if x in y:
return True
# x might be an iterable
# first we need to skip strings or we'll do silly things
if isinstance(x, string_types) or isinstance(x, bytes):
return False
try:
for value in x:
if value not in y:
return False
return True
except TypeError:
return False | def function[Operation, parameter[self, x, y]]:
constant[Whether x is fully contained in y.]
if compare[name[x] in name[y]] begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da204621f60> begin[:]
return[constant[False]]
<ast.Try object at 0x7da204620dc0> | keyword[def] identifier[Operation] ( identifier[self] , identifier[x] , identifier[y] ):
literal[string]
keyword[if] identifier[x] keyword[in] identifier[y] :
keyword[return] keyword[True]
keyword[if] identifier[isinstance] ( identifier[x] , identifier[string_types] ) keyword[or] identifier[isinstance] ( identifier[x] , identifier[bytes] ):
keyword[return] keyword[False]
keyword[try] :
keyword[for] identifier[value] keyword[in] identifier[x] :
keyword[if] identifier[value] keyword[not] keyword[in] identifier[y] :
keyword[return] keyword[False]
keyword[return] keyword[True]
keyword[except] identifier[TypeError] :
keyword[return] keyword[False] | def Operation(self, x, y):
"""Whether x is fully contained in y."""
if x in y:
return True # depends on [control=['if'], data=[]]
# x might be an iterable
# first we need to skip strings or we'll do silly things
if isinstance(x, string_types) or isinstance(x, bytes):
return False # depends on [control=['if'], data=[]]
try:
for value in x:
if value not in y:
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['value']]
return True # depends on [control=['try'], data=[]]
except TypeError:
return False # depends on [control=['except'], data=[]] |
def set_results(self, results: str):
"""
This method set the results attribute for the SASdata object; it stays in effect till changed
results - set the default result type for this SASdata object. 'Pandas' or 'HTML' or 'TEXT'.
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
:return: None
"""
if results.upper() == "HTML":
self.HTML = 1
else:
self.HTML = 0
self.results = results | def function[set_results, parameter[self, results]]:
constant[
This method set the results attribute for the SASdata object; it stays in effect till changed
results - set the default result type for this SASdata object. 'Pandas' or 'HTML' or 'TEXT'.
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
:return: None
]
if compare[call[name[results].upper, parameter[]] equal[==] constant[HTML]] begin[:]
name[self].HTML assign[=] constant[1]
name[self].results assign[=] name[results] | keyword[def] identifier[set_results] ( identifier[self] , identifier[results] : identifier[str] ):
literal[string]
keyword[if] identifier[results] . identifier[upper] ()== literal[string] :
identifier[self] . identifier[HTML] = literal[int]
keyword[else] :
identifier[self] . identifier[HTML] = literal[int]
identifier[self] . identifier[results] = identifier[results] | def set_results(self, results: str):
"""
This method set the results attribute for the SASdata object; it stays in effect till changed
results - set the default result type for this SASdata object. 'Pandas' or 'HTML' or 'TEXT'.
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
:return: None
"""
if results.upper() == 'HTML':
self.HTML = 1 # depends on [control=['if'], data=[]]
else:
self.HTML = 0
self.results = results |
def despike(self, expdecay_despiker=True, exponent=None,
noise_despiker=True, win=3, nlim=12., maxiter=3):
"""
Applies expdecay_despiker and noise_despiker to data.
Parameters
----------
expdecay_despiker : bool
Whether or not to apply the exponential decay filter.
exponent : None or float
The exponent for the exponential decay filter. If None,
it is determined automatically using `find_expocoef`.
noise_despiker : bool
Whether or not to apply the standard deviation spike filter.
win : int
The rolling window over which the spike filter calculates
the trace statistics.
nlim : float
The number of standard deviations above the rolling mean
that data are excluded.
maxiter : int
The max number of times that the fitler is applied.
Returns
-------
None
"""
if not hasattr(self, 'despiked'):
self.data['despiked'] = Bunch()
out = {}
for a, v in self.focus.items():
if 'time' not in a.lower():
sig = v.copy() # copy data
if expdecay_despiker:
if exponent is not None:
sig = proc.expdecay_despike(sig, exponent, self.tstep, maxiter)
else:
warnings.warn('exponent is None - either provide exponent, or run at `analyse`\nlevel to automatically calculate it.')
if noise_despiker:
sig = proc.noise_despike(sig, int(win), nlim, maxiter)
out[a] = sig
self.data['despiked'].update(out)
# recalculate total counts
self.data['total_counts'] = sum(self.data['despiked'].values())
self.setfocus('despiked')
return | def function[despike, parameter[self, expdecay_despiker, exponent, noise_despiker, win, nlim, maxiter]]:
constant[
Applies expdecay_despiker and noise_despiker to data.
Parameters
----------
expdecay_despiker : bool
Whether or not to apply the exponential decay filter.
exponent : None or float
The exponent for the exponential decay filter. If None,
it is determined automatically using `find_expocoef`.
noise_despiker : bool
Whether or not to apply the standard deviation spike filter.
win : int
The rolling window over which the spike filter calculates
the trace statistics.
nlim : float
The number of standard deviations above the rolling mean
that data are excluded.
maxiter : int
The max number of times that the fitler is applied.
Returns
-------
None
]
if <ast.UnaryOp object at 0x7da1b02f0460> begin[:]
call[name[self].data][constant[despiked]] assign[=] call[name[Bunch], parameter[]]
variable[out] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b02f2320>, <ast.Name object at 0x7da1b02f2710>]]] in starred[call[name[self].focus.items, parameter[]]] begin[:]
if compare[constant[time] <ast.NotIn object at 0x7da2590d7190> call[name[a].lower, parameter[]]] begin[:]
variable[sig] assign[=] call[name[v].copy, parameter[]]
if name[expdecay_despiker] begin[:]
if compare[name[exponent] is_not constant[None]] begin[:]
variable[sig] assign[=] call[name[proc].expdecay_despike, parameter[name[sig], name[exponent], name[self].tstep, name[maxiter]]]
if name[noise_despiker] begin[:]
variable[sig] assign[=] call[name[proc].noise_despike, parameter[name[sig], call[name[int], parameter[name[win]]], name[nlim], name[maxiter]]]
call[name[out]][name[a]] assign[=] name[sig]
call[call[name[self].data][constant[despiked]].update, parameter[name[out]]]
call[name[self].data][constant[total_counts]] assign[=] call[name[sum], parameter[call[call[name[self].data][constant[despiked]].values, parameter[]]]]
call[name[self].setfocus, parameter[constant[despiked]]]
return[None] | keyword[def] identifier[despike] ( identifier[self] , identifier[expdecay_despiker] = keyword[True] , identifier[exponent] = keyword[None] ,
identifier[noise_despiker] = keyword[True] , identifier[win] = literal[int] , identifier[nlim] = literal[int] , identifier[maxiter] = literal[int] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[data] [ literal[string] ]= identifier[Bunch] ()
identifier[out] ={}
keyword[for] identifier[a] , identifier[v] keyword[in] identifier[self] . identifier[focus] . identifier[items] ():
keyword[if] literal[string] keyword[not] keyword[in] identifier[a] . identifier[lower] ():
identifier[sig] = identifier[v] . identifier[copy] ()
keyword[if] identifier[expdecay_despiker] :
keyword[if] identifier[exponent] keyword[is] keyword[not] keyword[None] :
identifier[sig] = identifier[proc] . identifier[expdecay_despike] ( identifier[sig] , identifier[exponent] , identifier[self] . identifier[tstep] , identifier[maxiter] )
keyword[else] :
identifier[warnings] . identifier[warn] ( literal[string] )
keyword[if] identifier[noise_despiker] :
identifier[sig] = identifier[proc] . identifier[noise_despike] ( identifier[sig] , identifier[int] ( identifier[win] ), identifier[nlim] , identifier[maxiter] )
identifier[out] [ identifier[a] ]= identifier[sig]
identifier[self] . identifier[data] [ literal[string] ]. identifier[update] ( identifier[out] )
identifier[self] . identifier[data] [ literal[string] ]= identifier[sum] ( identifier[self] . identifier[data] [ literal[string] ]. identifier[values] ())
identifier[self] . identifier[setfocus] ( literal[string] )
keyword[return] | def despike(self, expdecay_despiker=True, exponent=None, noise_despiker=True, win=3, nlim=12.0, maxiter=3):
"""
Applies expdecay_despiker and noise_despiker to data.
Parameters
----------
expdecay_despiker : bool
Whether or not to apply the exponential decay filter.
exponent : None or float
The exponent for the exponential decay filter. If None,
it is determined automatically using `find_expocoef`.
noise_despiker : bool
Whether or not to apply the standard deviation spike filter.
win : int
The rolling window over which the spike filter calculates
the trace statistics.
nlim : float
The number of standard deviations above the rolling mean
that data are excluded.
maxiter : int
The max number of times that the fitler is applied.
Returns
-------
None
"""
if not hasattr(self, 'despiked'):
self.data['despiked'] = Bunch() # depends on [control=['if'], data=[]]
out = {}
for (a, v) in self.focus.items():
if 'time' not in a.lower():
sig = v.copy() # copy data
if expdecay_despiker:
if exponent is not None:
sig = proc.expdecay_despike(sig, exponent, self.tstep, maxiter) # depends on [control=['if'], data=['exponent']]
else:
warnings.warn('exponent is None - either provide exponent, or run at `analyse`\nlevel to automatically calculate it.') # depends on [control=['if'], data=[]]
if noise_despiker:
sig = proc.noise_despike(sig, int(win), nlim, maxiter) # depends on [control=['if'], data=[]]
out[a] = sig # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
self.data['despiked'].update(out)
# recalculate total counts
self.data['total_counts'] = sum(self.data['despiked'].values())
self.setfocus('despiked')
return |
def prepare_parameters(self, multi_row_parameters):
""" Attribute sql parameters with meta data for a prepared statement.
Make some basic checks that at least the number of parameters is correct.
:param multi_row_parameters: A list/tuple containing list/tuples of parameters (for multiple rows)
:returns: A generator producing parameters attributed with meta data for one sql statement (a row) at a time
"""
self._multi_row_parameters = multi_row_parameters
self._num_rows = len(multi_row_parameters)
self._iter_row_count = 0
return self | def function[prepare_parameters, parameter[self, multi_row_parameters]]:
constant[ Attribute sql parameters with meta data for a prepared statement.
Make some basic checks that at least the number of parameters is correct.
:param multi_row_parameters: A list/tuple containing list/tuples of parameters (for multiple rows)
:returns: A generator producing parameters attributed with meta data for one sql statement (a row) at a time
]
name[self]._multi_row_parameters assign[=] name[multi_row_parameters]
name[self]._num_rows assign[=] call[name[len], parameter[name[multi_row_parameters]]]
name[self]._iter_row_count assign[=] constant[0]
return[name[self]] | keyword[def] identifier[prepare_parameters] ( identifier[self] , identifier[multi_row_parameters] ):
literal[string]
identifier[self] . identifier[_multi_row_parameters] = identifier[multi_row_parameters]
identifier[self] . identifier[_num_rows] = identifier[len] ( identifier[multi_row_parameters] )
identifier[self] . identifier[_iter_row_count] = literal[int]
keyword[return] identifier[self] | def prepare_parameters(self, multi_row_parameters):
""" Attribute sql parameters with meta data for a prepared statement.
Make some basic checks that at least the number of parameters is correct.
:param multi_row_parameters: A list/tuple containing list/tuples of parameters (for multiple rows)
:returns: A generator producing parameters attributed with meta data for one sql statement (a row) at a time
"""
self._multi_row_parameters = multi_row_parameters
self._num_rows = len(multi_row_parameters)
self._iter_row_count = 0
return self |
def do_call(self, parser: BasicParser) -> Node:
"""
The Decorator call is the one that actually pushes/pops
the decorator in the active decorators list (parsing._decorators)
"""
valueparam = []
for v, t in self.param:
if t is Node:
valueparam.append(parser.rule_nodes[v])
elif type(v) is t:
valueparam.append(v)
else:
raise TypeError(
"Type mismatch expected {} got {}".format(t, type(v)))
if not self.checkParam(self.decorator_class, valueparam):
return False
decorator = self.decorator_class(*valueparam)
global _decorators
_decorators.append(decorator)
res = self.pt(parser)
_decorators.pop()
return res | def function[do_call, parameter[self, parser]]:
constant[
The Decorator call is the one that actually pushes/pops
the decorator in the active decorators list (parsing._decorators)
]
variable[valueparam] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b01fd120>, <ast.Name object at 0x7da1b01fd540>]]] in starred[name[self].param] begin[:]
if compare[name[t] is name[Node]] begin[:]
call[name[valueparam].append, parameter[call[name[parser].rule_nodes][name[v]]]]
if <ast.UnaryOp object at 0x7da1b01feb00> begin[:]
return[constant[False]]
variable[decorator] assign[=] call[name[self].decorator_class, parameter[<ast.Starred object at 0x7da1b01fc940>]]
<ast.Global object at 0x7da1b01fc670>
call[name[_decorators].append, parameter[name[decorator]]]
variable[res] assign[=] call[name[self].pt, parameter[name[parser]]]
call[name[_decorators].pop, parameter[]]
return[name[res]] | keyword[def] identifier[do_call] ( identifier[self] , identifier[parser] : identifier[BasicParser] )-> identifier[Node] :
literal[string]
identifier[valueparam] =[]
keyword[for] identifier[v] , identifier[t] keyword[in] identifier[self] . identifier[param] :
keyword[if] identifier[t] keyword[is] identifier[Node] :
identifier[valueparam] . identifier[append] ( identifier[parser] . identifier[rule_nodes] [ identifier[v] ])
keyword[elif] identifier[type] ( identifier[v] ) keyword[is] identifier[t] :
identifier[valueparam] . identifier[append] ( identifier[v] )
keyword[else] :
keyword[raise] identifier[TypeError] (
literal[string] . identifier[format] ( identifier[t] , identifier[type] ( identifier[v] )))
keyword[if] keyword[not] identifier[self] . identifier[checkParam] ( identifier[self] . identifier[decorator_class] , identifier[valueparam] ):
keyword[return] keyword[False]
identifier[decorator] = identifier[self] . identifier[decorator_class] (* identifier[valueparam] )
keyword[global] identifier[_decorators]
identifier[_decorators] . identifier[append] ( identifier[decorator] )
identifier[res] = identifier[self] . identifier[pt] ( identifier[parser] )
identifier[_decorators] . identifier[pop] ()
keyword[return] identifier[res] | def do_call(self, parser: BasicParser) -> Node:
"""
The Decorator call is the one that actually pushes/pops
the decorator in the active decorators list (parsing._decorators)
"""
valueparam = []
for (v, t) in self.param:
if t is Node:
valueparam.append(parser.rule_nodes[v]) # depends on [control=['if'], data=[]]
elif type(v) is t:
valueparam.append(v) # depends on [control=['if'], data=[]]
else:
raise TypeError('Type mismatch expected {} got {}'.format(t, type(v))) # depends on [control=['for'], data=[]]
if not self.checkParam(self.decorator_class, valueparam):
return False # depends on [control=['if'], data=[]]
decorator = self.decorator_class(*valueparam)
global _decorators
_decorators.append(decorator)
res = self.pt(parser)
_decorators.pop()
return res |
def draw_jointplot(figname, x, y, data=None, kind="reg", color=None,
xlim=None, ylim=None, format="pdf"):
"""
Wraps around sns.jointplot
"""
import seaborn as sns
sns.set_context('talk')
plt.clf()
register = {"MeanCoverage": "Sample Mean Coverage",
"HD.FDP": "Depth of full spanning reads",
"HD.PDP": "Depth of partial spanning reads",
"HD.PEDP": "Depth of paired-end reads",
"HD.2": "Repeat size of the longer allele"}
g = sns.jointplot(x, y, data=data, kind=kind, color=color,
xlim=xlim, ylim=ylim)
g.ax_joint.set_xlabel(register.get(x, x))
g.ax_joint.set_ylabel(register.get(y, y))
savefig(figname + "." + format, cleanup=False) | def function[draw_jointplot, parameter[figname, x, y, data, kind, color, xlim, ylim, format]]:
constant[
Wraps around sns.jointplot
]
import module[seaborn] as alias[sns]
call[name[sns].set_context, parameter[constant[talk]]]
call[name[plt].clf, parameter[]]
variable[register] assign[=] dictionary[[<ast.Constant object at 0x7da18f720e80>, <ast.Constant object at 0x7da18f723790>, <ast.Constant object at 0x7da18f720a60>, <ast.Constant object at 0x7da18f722f20>, <ast.Constant object at 0x7da18f7223b0>], [<ast.Constant object at 0x7da18f722980>, <ast.Constant object at 0x7da18f722830>, <ast.Constant object at 0x7da18f722c20>, <ast.Constant object at 0x7da18f722e60>, <ast.Constant object at 0x7da18f720ca0>]]
variable[g] assign[=] call[name[sns].jointplot, parameter[name[x], name[y]]]
call[name[g].ax_joint.set_xlabel, parameter[call[name[register].get, parameter[name[x], name[x]]]]]
call[name[g].ax_joint.set_ylabel, parameter[call[name[register].get, parameter[name[y], name[y]]]]]
call[name[savefig], parameter[binary_operation[binary_operation[name[figname] + constant[.]] + name[format]]]] | keyword[def] identifier[draw_jointplot] ( identifier[figname] , identifier[x] , identifier[y] , identifier[data] = keyword[None] , identifier[kind] = literal[string] , identifier[color] = keyword[None] ,
identifier[xlim] = keyword[None] , identifier[ylim] = keyword[None] , identifier[format] = literal[string] ):
literal[string]
keyword[import] identifier[seaborn] keyword[as] identifier[sns]
identifier[sns] . identifier[set_context] ( literal[string] )
identifier[plt] . identifier[clf] ()
identifier[register] ={ literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[g] = identifier[sns] . identifier[jointplot] ( identifier[x] , identifier[y] , identifier[data] = identifier[data] , identifier[kind] = identifier[kind] , identifier[color] = identifier[color] ,
identifier[xlim] = identifier[xlim] , identifier[ylim] = identifier[ylim] )
identifier[g] . identifier[ax_joint] . identifier[set_xlabel] ( identifier[register] . identifier[get] ( identifier[x] , identifier[x] ))
identifier[g] . identifier[ax_joint] . identifier[set_ylabel] ( identifier[register] . identifier[get] ( identifier[y] , identifier[y] ))
identifier[savefig] ( identifier[figname] + literal[string] + identifier[format] , identifier[cleanup] = keyword[False] ) | def draw_jointplot(figname, x, y, data=None, kind='reg', color=None, xlim=None, ylim=None, format='pdf'):
"""
Wraps around sns.jointplot
"""
import seaborn as sns
sns.set_context('talk')
plt.clf()
register = {'MeanCoverage': 'Sample Mean Coverage', 'HD.FDP': 'Depth of full spanning reads', 'HD.PDP': 'Depth of partial spanning reads', 'HD.PEDP': 'Depth of paired-end reads', 'HD.2': 'Repeat size of the longer allele'}
g = sns.jointplot(x, y, data=data, kind=kind, color=color, xlim=xlim, ylim=ylim)
g.ax_joint.set_xlabel(register.get(x, x))
g.ax_joint.set_ylabel(register.get(y, y))
savefig(figname + '.' + format, cleanup=False) |
def search(self, line):
"""CN search."""
if self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) == "solr":
return self._search_solr(line)
raise d1_cli.impl.exceptions.InvalidArguments(
"Unsupported query engine: {}".format(
self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME)
)
) | def function[search, parameter[self, line]]:
constant[CN search.]
if compare[call[name[self]._session.get, parameter[name[d1_cli].impl.session.QUERY_ENGINE_NAME]] equal[==] constant[solr]] begin[:]
return[call[name[self]._search_solr, parameter[name[line]]]]
<ast.Raise object at 0x7da1b19f0d00> | keyword[def] identifier[search] ( identifier[self] , identifier[line] ):
literal[string]
keyword[if] identifier[self] . identifier[_session] . identifier[get] ( identifier[d1_cli] . identifier[impl] . identifier[session] . identifier[QUERY_ENGINE_NAME] )== literal[string] :
keyword[return] identifier[self] . identifier[_search_solr] ( identifier[line] )
keyword[raise] identifier[d1_cli] . identifier[impl] . identifier[exceptions] . identifier[InvalidArguments] (
literal[string] . identifier[format] (
identifier[self] . identifier[_session] . identifier[get] ( identifier[d1_cli] . identifier[impl] . identifier[session] . identifier[QUERY_ENGINE_NAME] )
)
) | def search(self, line):
"""CN search."""
if self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME) == 'solr':
return self._search_solr(line) # depends on [control=['if'], data=[]]
raise d1_cli.impl.exceptions.InvalidArguments('Unsupported query engine: {}'.format(self._session.get(d1_cli.impl.session.QUERY_ENGINE_NAME))) |
def _init_matrix(self, data, index, columns, dtype=None):
"""
Init self from ndarray or list of lists.
"""
data = prep_ndarray(data, copy=False)
index, columns = self._prep_index(data, index, columns)
data = {idx: data[:, i] for i, idx in enumerate(columns)}
return self._init_dict(data, index, columns, dtype) | def function[_init_matrix, parameter[self, data, index, columns, dtype]]:
constant[
Init self from ndarray or list of lists.
]
variable[data] assign[=] call[name[prep_ndarray], parameter[name[data]]]
<ast.Tuple object at 0x7da18fe93640> assign[=] call[name[self]._prep_index, parameter[name[data], name[index], name[columns]]]
variable[data] assign[=] <ast.DictComp object at 0x7da18fe92bf0>
return[call[name[self]._init_dict, parameter[name[data], name[index], name[columns], name[dtype]]]] | keyword[def] identifier[_init_matrix] ( identifier[self] , identifier[data] , identifier[index] , identifier[columns] , identifier[dtype] = keyword[None] ):
literal[string]
identifier[data] = identifier[prep_ndarray] ( identifier[data] , identifier[copy] = keyword[False] )
identifier[index] , identifier[columns] = identifier[self] . identifier[_prep_index] ( identifier[data] , identifier[index] , identifier[columns] )
identifier[data] ={ identifier[idx] : identifier[data] [:, identifier[i] ] keyword[for] identifier[i] , identifier[idx] keyword[in] identifier[enumerate] ( identifier[columns] )}
keyword[return] identifier[self] . identifier[_init_dict] ( identifier[data] , identifier[index] , identifier[columns] , identifier[dtype] ) | def _init_matrix(self, data, index, columns, dtype=None):
"""
Init self from ndarray or list of lists.
"""
data = prep_ndarray(data, copy=False)
(index, columns) = self._prep_index(data, index, columns)
data = {idx: data[:, i] for (i, idx) in enumerate(columns)}
return self._init_dict(data, index, columns, dtype) |
def get_distance_and_image(
    self,
    frac_coords1: Vector3Like,
    frac_coords2: Vector3Like,
    jimage: Optional[Union[List[int], np.ndarray]] = None,
) -> Tuple[float, np.ndarray]:
    """
    Gets distance between two frac_coords assuming periodic boundary
    conditions. If the index jimage is not specified it selects the j
    image nearest to the i atom and returns the distance and jimage
    indices in terms of lattice vector translations. If the index jimage
    is specified it returns the distance between the frac_coords1 and
    the specified jimage of frac_coords2, and the given jimage is also
    returned.

    Args:
        fcoords1 (3x1 array): Reference fcoords to get distance from.
        fcoords2 (3x1 array): fcoords to get distance from.
        jimage (3x1 array): Specific periodic image in terms of
            lattice translations, e.g., [1,0,0] implies to take periodic
            image that is one a-lattice vector away. If jimage is None,
            the image that is nearest to the site is found.

    Returns:
        (distance, jimage): distance and periodic lattice translations
        of the other site for which the distance applies. This means that
        the distance between frac_coords1 and (jimage + frac_coords2) is
        equal to distance.
    """
    if jimage is None:
        # Find the periodic image of frac_coords2 nearest to frac_coords1.
        v, d2 = pbc_shortest_vectors(
            self, frac_coords1, frac_coords2, return_d2=True
        )
        fc = self.get_fractional_coords(v[0][0]) + frac_coords1 - frac_coords2
        # np.int was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int is the documented replacement.
        fc = np.array(np.round(fc), dtype=int)
        return np.sqrt(d2[0, 0]), fc

    # Explicit image requested: distance to (jimage + frac_coords2).
    jimage = np.array(jimage)
    mapped_vec = self.get_cartesian_coords(jimage + frac_coords2 - frac_coords1)
    return np.linalg.norm(mapped_vec), jimage
constant[
Gets distance between two frac_coords assuming periodic boundary
conditions. If the index jimage is not specified it selects the j
image nearest to the i atom and returns the distance and jimage
indices in terms of lattice vector translations. If the index jimage
is specified it returns the distance between the frac_coords1 and
the specified jimage of frac_coords2, and the given jimage is also
returned.
Args:
fcoords1 (3x1 array): Reference fcoords to get distance from.
fcoords2 (3x1 array): fcoords to get distance from.
jimage (3x1 array): Specific periodic image in terms of
lattice translations, e.g., [1,0,0] implies to take periodic
image that is one a-lattice vector away. If jimage is None,
the image that is nearest to the site is found.
Returns:
(distance, jimage): distance and periodic lattice translations
of the other site for which the distance applies. This means that
the distance between frac_coords1 and (jimage + frac_coords2) is
equal to distance.
]
if compare[name[jimage] is constant[None]] begin[:]
<ast.Tuple object at 0x7da18dc06110> assign[=] call[name[pbc_shortest_vectors], parameter[name[self], name[frac_coords1], name[frac_coords2]]]
variable[fc] assign[=] binary_operation[binary_operation[call[name[self].get_fractional_coords, parameter[call[call[name[v]][constant[0]]][constant[0]]]] + name[frac_coords1]] - name[frac_coords2]]
variable[fc] assign[=] call[name[np].array, parameter[call[name[np].round, parameter[name[fc]]]]]
return[tuple[[<ast.Call object at 0x7da1b1cac2b0>, <ast.Name object at 0x7da1b1cac490>]]]
variable[jimage] assign[=] call[name[np].array, parameter[name[jimage]]]
variable[mapped_vec] assign[=] call[name[self].get_cartesian_coords, parameter[binary_operation[binary_operation[name[jimage] + name[frac_coords2]] - name[frac_coords1]]]]
return[tuple[[<ast.Call object at 0x7da1b1caf400>, <ast.Name object at 0x7da1b1cad360>]]] | keyword[def] identifier[get_distance_and_image] (
identifier[self] ,
identifier[frac_coords1] : identifier[Vector3Like] ,
identifier[frac_coords2] : identifier[Vector3Like] ,
identifier[jimage] : identifier[Optional] [ identifier[Union] [ identifier[List] [ identifier[int] ], identifier[np] . identifier[ndarray] ]]= keyword[None] ,
)-> identifier[Tuple] [ identifier[float] , identifier[np] . identifier[ndarray] ]:
literal[string]
keyword[if] identifier[jimage] keyword[is] keyword[None] :
identifier[v] , identifier[d2] = identifier[pbc_shortest_vectors] (
identifier[self] , identifier[frac_coords1] , identifier[frac_coords2] , identifier[return_d2] = keyword[True]
)
identifier[fc] = identifier[self] . identifier[get_fractional_coords] ( identifier[v] [ literal[int] ][ literal[int] ])+ identifier[frac_coords1] - identifier[frac_coords2]
identifier[fc] = identifier[np] . identifier[array] ( identifier[np] . identifier[round] ( identifier[fc] ), identifier[dtype] = identifier[np] . identifier[int] )
keyword[return] identifier[np] . identifier[sqrt] ( identifier[d2] [ literal[int] , literal[int] ]), identifier[fc]
identifier[jimage] = identifier[np] . identifier[array] ( identifier[jimage] )
identifier[mapped_vec] = identifier[self] . identifier[get_cartesian_coords] ( identifier[jimage] + identifier[frac_coords2] - identifier[frac_coords1] )
keyword[return] identifier[np] . identifier[linalg] . identifier[norm] ( identifier[mapped_vec] ), identifier[jimage] | def get_distance_and_image(self, frac_coords1: Vector3Like, frac_coords2: Vector3Like, jimage: Optional[Union[List[int], np.ndarray]]=None) -> Tuple[float, np.ndarray]:
"""
Gets distance between two frac_coords assuming periodic boundary
conditions. If the index jimage is not specified it selects the j
image nearest to the i atom and returns the distance and jimage
indices in terms of lattice vector translations. If the index jimage
is specified it returns the distance between the frac_coords1 and
the specified jimage of frac_coords2, and the given jimage is also
returned.
Args:
fcoords1 (3x1 array): Reference fcoords to get distance from.
fcoords2 (3x1 array): fcoords to get distance from.
jimage (3x1 array): Specific periodic image in terms of
lattice translations, e.g., [1,0,0] implies to take periodic
image that is one a-lattice vector away. If jimage is None,
the image that is nearest to the site is found.
Returns:
(distance, jimage): distance and periodic lattice translations
of the other site for which the distance applies. This means that
the distance between frac_coords1 and (jimage + frac_coords2) is
equal to distance.
"""
if jimage is None:
(v, d2) = pbc_shortest_vectors(self, frac_coords1, frac_coords2, return_d2=True)
fc = self.get_fractional_coords(v[0][0]) + frac_coords1 - frac_coords2
fc = np.array(np.round(fc), dtype=np.int)
return (np.sqrt(d2[0, 0]), fc) # depends on [control=['if'], data=[]]
jimage = np.array(jimage)
mapped_vec = self.get_cartesian_coords(jimage + frac_coords2 - frac_coords1)
return (np.linalg.norm(mapped_vec), jimage) |
def change_digital(self, pin_nr, value):
    """Write *value* to a digital pin (boolean); PWM pins also accept a float.

    No-op when no board is attached.
    """
    if self._board:
        with self._lock:
            self._act_digital[pin_nr].pin.write(value)
constant[ Change digital Pin value (boolean). Also PWM supported(float)]
if <ast.UnaryOp object at 0x7da20c6a8730> begin[:]
return[None]
with name[self]._lock begin[:]
call[call[name[self]._act_digital][name[pin_nr]].pin.write, parameter[name[value]]] | keyword[def] identifier[change_digital] ( identifier[self] , identifier[pin_nr] , identifier[value] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_board] :
keyword[return]
keyword[with] identifier[self] . identifier[_lock] :
identifier[self] . identifier[_act_digital] [ identifier[pin_nr] ]. identifier[pin] . identifier[write] ( identifier[value] ) | def change_digital(self, pin_nr, value):
""" Change digital Pin value (boolean). Also PWM supported(float)"""
if not self._board:
return # depends on [control=['if'], data=[]]
with self._lock:
self._act_digital[pin_nr].pin.write(value) # depends on [control=['with'], data=[]] |
def _combine_results(self, match_as_dict):
    '''Combine results from different parsed parts.

    Non-empty values stored under letter-suffixed keys (``_a`` .. ``_m``),
    e.g. ``postal_code_b``, are folded into the base key, so
    ``{'postal_code_b': '123456'}`` becomes ``{'postal_code': '123456'}``.
    Plain keys are kept unless the name was already recorded.

    Fixes: the previous substring test (``k[-2:] in '_a_b_..._m'``)
    false-positived on keys such as ``'a'`` (stripped to ``''``) or keys
    ending in ``'a_'``; an exact suffix check avoids that.
    '''
    # Exact suffixes produced by the alternative parse parts.
    suffixes = tuple('_' + letter for letter in 'abcdefghijklm')
    combined = {}
    for key, value in match_as_dict.items():
        if key.endswith(suffixes):
            # Only a non-empty match wins; a later part overwrites an
            # earlier one, matching the previous zip/dict behaviour.
            if value:
                combined[key[:-2]] = value
        elif key not in combined:
            combined[key] = value
    return combined
constant[Combine results from different parsed parts:
we look for non-empty results in values like
'postal_code_b' or 'postal_code_c' and store
them as main value.
So 'postal_code_b':'123456'
becomes:
'postal_code' :'123456'
]
variable[keys] assign[=] list[[]]
variable[vals] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b27e0cd0>, <ast.Name object at 0x7da1b27e2a40>]]] in starred[call[name[six].iteritems, parameter[name[match_as_dict]]]] begin[:]
if compare[call[name[k]][<ast.Slice object at 0x7da1b27e1570>] in constant[_a_b_c_d_e_f_g_h_i_j_k_l_m]] begin[:]
if name[v] begin[:]
call[name[keys].append, parameter[call[name[k]][<ast.Slice object at 0x7da1b27e3820>]]]
call[name[vals].append, parameter[name[v]]]
return[call[name[dict], parameter[call[name[zip], parameter[name[keys], name[vals]]]]]] | keyword[def] identifier[_combine_results] ( identifier[self] , identifier[match_as_dict] ):
literal[string]
identifier[keys] =[]
identifier[vals] =[]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[match_as_dict] ):
keyword[if] identifier[k] [- literal[int] :] keyword[in] literal[string] :
keyword[if] identifier[v] :
identifier[keys] . identifier[append] ( identifier[k] [:- literal[int] ])
identifier[vals] . identifier[append] ( identifier[v] )
keyword[else] :
keyword[if] identifier[k] keyword[not] keyword[in] identifier[keys] :
identifier[keys] . identifier[append] ( identifier[k] )
identifier[vals] . identifier[append] ( identifier[v] )
keyword[return] identifier[dict] ( identifier[zip] ( identifier[keys] , identifier[vals] )) | def _combine_results(self, match_as_dict):
"""Combine results from different parsed parts:
we look for non-empty results in values like
'postal_code_b' or 'postal_code_c' and store
them as main value.
So 'postal_code_b':'123456'
becomes:
'postal_code' :'123456'
"""
keys = []
vals = []
for (k, v) in six.iteritems(match_as_dict):
if k[-2:] in '_a_b_c_d_e_f_g_h_i_j_k_l_m':
if v:
# strip last 2 chars: '..._b' -> '...'
keys.append(k[:-2])
vals.append(v) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif k not in keys:
keys.append(k)
vals.append(v) # depends on [control=['if'], data=['k', 'keys']] # depends on [control=['for'], data=[]]
return dict(zip(keys, vals)) |
def type_match(a, b):
    """Return True if the types of a and b are compatible, False otherwise."""
    type_a, type_b = a['type'], b['type']
    # Identical types are always compatible.
    if type_a == type_b:
        return True
    # Otherwise both must belong to the same equivalence group.
    equivalence_groups = (
        {'ONT::GENE-PROTEIN', 'ONT::GENE', 'ONT::PROTEIN'},
        {'ONT::PHARMACOLOGIC-SUBSTANCE', 'ONT::CHEMICAL'},
    )
    return any(type_a in group and type_b in group
               for group in equivalence_groups)
constant[Return True of the types of a and b are compatible, False otherwise.]
if compare[call[name[a]][constant[type]] equal[==] call[name[b]][constant[type]]] begin[:]
return[constant[True]]
variable[eq_groups] assign[=] list[[<ast.Set object at 0x7da204565f60>, <ast.Set object at 0x7da204564940>]]
for taget[name[eq_group]] in starred[name[eq_groups]] begin[:]
if <ast.BoolOp object at 0x7da2041d9ff0> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[type_match] ( identifier[a] , identifier[b] ):
literal[string]
keyword[if] identifier[a] [ literal[string] ]== identifier[b] [ literal[string] ]:
keyword[return] keyword[True]
identifier[eq_groups] =[
{ literal[string] , literal[string] , literal[string] },
{ literal[string] , literal[string] }
]
keyword[for] identifier[eq_group] keyword[in] identifier[eq_groups] :
keyword[if] identifier[a] [ literal[string] ] keyword[in] identifier[eq_group] keyword[and] identifier[b] [ literal[string] ] keyword[in] identifier[eq_group] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def type_match(a, b):
"""Return True of the types of a and b are compatible, False otherwise."""
# If the types are the same, return True
if a['type'] == b['type']:
return True # depends on [control=['if'], data=[]]
# Otherwise, look at some special cases
eq_groups = [{'ONT::GENE-PROTEIN', 'ONT::GENE', 'ONT::PROTEIN'}, {'ONT::PHARMACOLOGIC-SUBSTANCE', 'ONT::CHEMICAL'}]
for eq_group in eq_groups:
if a['type'] in eq_group and b['type'] in eq_group:
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['eq_group']]
return False |
def get_subscribers(cls, active_only=True):
    """Returns a list of Recipient objects subscribed for this message type.

    :param bool active_only: when True, recipients whose user object is
        inactive are skipped.
    :return: list of Recipient
    """
    result = []
    for subscription in Subscription.get_for_message_cls(cls.alias):
        messenger_cls = subscription.messenger_cls
        recipient = subscription.recipient
        address = subscription.address

        # Do not send messages to inactive users.
        if active_only and recipient and not getattr(recipient, 'is_active', False):
            continue

        if address is None:
            # Fall back to the messenger's own address resolution;
            # an unknown messenger simply leaves the address unset.
            try:
                address = get_registered_messenger_object(messenger_cls).get_address(recipient)
            except UnknownMessengerError:
                pass

        # Only string addresses are usable.
        if address and isinstance(address, string_types):
            result.append(Recipient(messenger_cls, recipient, address))

    return result
constant[Returns a list of Recipient objects subscribed for this message type.
:param bool active_only: Flag whether
:return:
]
variable[subscribers_raw] assign[=] call[name[Subscription].get_for_message_cls, parameter[name[cls].alias]]
variable[subscribers] assign[=] list[[]]
for taget[name[subscriber]] in starred[name[subscribers_raw]] begin[:]
variable[messenger_cls] assign[=] name[subscriber].messenger_cls
variable[address] assign[=] name[subscriber].address
variable[recipient] assign[=] name[subscriber].recipient
if <ast.BoolOp object at 0x7da1b26f27d0> begin[:]
if <ast.UnaryOp object at 0x7da1b26f1210> begin[:]
continue
if compare[name[address] is constant[None]] begin[:]
<ast.Try object at 0x7da1b26f3f40>
if <ast.BoolOp object at 0x7da1b287bfd0> begin[:]
call[name[subscribers].append, parameter[call[name[Recipient], parameter[name[messenger_cls], name[recipient], name[address]]]]]
return[name[subscribers]] | keyword[def] identifier[get_subscribers] ( identifier[cls] , identifier[active_only] = keyword[True] ):
literal[string]
identifier[subscribers_raw] = identifier[Subscription] . identifier[get_for_message_cls] ( identifier[cls] . identifier[alias] )
identifier[subscribers] =[]
keyword[for] identifier[subscriber] keyword[in] identifier[subscribers_raw] :
identifier[messenger_cls] = identifier[subscriber] . identifier[messenger_cls]
identifier[address] = identifier[subscriber] . identifier[address]
identifier[recipient] = identifier[subscriber] . identifier[recipient]
keyword[if] identifier[active_only] keyword[and] identifier[recipient] :
keyword[if] keyword[not] identifier[getattr] ( identifier[recipient] , literal[string] , keyword[False] ):
keyword[continue]
keyword[if] identifier[address] keyword[is] keyword[None] :
keyword[try] :
identifier[address] = identifier[get_registered_messenger_object] ( identifier[messenger_cls] ). identifier[get_address] ( identifier[recipient] )
keyword[except] identifier[UnknownMessengerError] :
keyword[pass]
keyword[if] identifier[address] keyword[and] identifier[isinstance] ( identifier[address] , identifier[string_types] ):
identifier[subscribers] . identifier[append] ( identifier[Recipient] ( identifier[messenger_cls] , identifier[recipient] , identifier[address] ))
keyword[return] identifier[subscribers] | def get_subscribers(cls, active_only=True):
"""Returns a list of Recipient objects subscribed for this message type.
:param bool active_only: Flag whether
:return:
"""
subscribers_raw = Subscription.get_for_message_cls(cls.alias)
subscribers = []
for subscriber in subscribers_raw:
messenger_cls = subscriber.messenger_cls
address = subscriber.address
recipient = subscriber.recipient
# Do not send messages to inactive users.
if active_only and recipient:
if not getattr(recipient, 'is_active', False):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if address is None:
try:
address = get_registered_messenger_object(messenger_cls).get_address(recipient) # depends on [control=['try'], data=[]]
except UnknownMessengerError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['address']]
if address and isinstance(address, string_types):
subscribers.append(Recipient(messenger_cls, recipient, address)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['subscriber']]
return subscribers |
def create_tutorial_layout(self):
    """Build the prompt_toolkit layout used by the example tutorial.

    Returns an HSplit with the main input window (plus a floating
    completion menu) stacked above a conditional pane holding the
    parameter help line, the example-line buffer and a one-row toolbar.
    """
    # Only the first lexer is used here; the other two are discarded.
    lexer, _, _ = get_lexers(self.shell_ctx.lexer, None, None)
    layout_full = HSplit([
        FloatContainer(
            # Main input buffer window.
            Window(
                BufferControl(
                    input_processors=self.input_processors,
                    lexer=lexer,
                    preview_search=Always()),
                get_height=get_height),
            [
                # Completion menu floats at the cursor while the
                # default buffer has focus.
                Float(xcursor=True,
                      ycursor=True,
                      content=CompletionsMenu(
                          max_height=MAX_COMPLETION,
                          scroll_offset=1,
                          extra_filter=(HasFocus(DEFAULT_BUFFER))))]),
        ConditionalContainer(
            HSplit([
                get_hline(),
                get_param(lexer),
                get_hline(),
                # Buffer window displaying the example command line.
                Window(
                    content=BufferControl(
                        buffer_name='example_line',
                        lexer=lexer
                    ),
                ),
                # One-row toolbar rendered from the tutorial tokens.
                Window(
                    TokenListControl(
                        get_tutorial_tokens,
                        default_char=Char(' ', Token.Toolbar)),
                    height=LayoutDimension.exact(1)),
            ]),
            # Hide this pane once input is done or while the renderer
            # height is still unknown.
            filter=~IsDone() & RendererHeightIsKnown()
        )
    ])
    return layout_full
constant[ layout for example tutorial ]
<ast.Tuple object at 0x7da1b26ae3b0> assign[=] call[name[get_lexers], parameter[name[self].shell_ctx.lexer, constant[None], constant[None]]]
variable[layout_full] assign[=] call[name[HSplit], parameter[list[[<ast.Call object at 0x7da18bc72e30>, <ast.Call object at 0x7da20c992080>]]]]
return[name[layout_full]] | keyword[def] identifier[create_tutorial_layout] ( identifier[self] ):
literal[string]
identifier[lexer] , identifier[_] , identifier[_] = identifier[get_lexers] ( identifier[self] . identifier[shell_ctx] . identifier[lexer] , keyword[None] , keyword[None] )
identifier[layout_full] = identifier[HSplit] ([
identifier[FloatContainer] (
identifier[Window] (
identifier[BufferControl] (
identifier[input_processors] = identifier[self] . identifier[input_processors] ,
identifier[lexer] = identifier[lexer] ,
identifier[preview_search] = identifier[Always] ()),
identifier[get_height] = identifier[get_height] ),
[
identifier[Float] ( identifier[xcursor] = keyword[True] ,
identifier[ycursor] = keyword[True] ,
identifier[content] = identifier[CompletionsMenu] (
identifier[max_height] = identifier[MAX_COMPLETION] ,
identifier[scroll_offset] = literal[int] ,
identifier[extra_filter] =( identifier[HasFocus] ( identifier[DEFAULT_BUFFER] ))))]),
identifier[ConditionalContainer] (
identifier[HSplit] ([
identifier[get_hline] (),
identifier[get_param] ( identifier[lexer] ),
identifier[get_hline] (),
identifier[Window] (
identifier[content] = identifier[BufferControl] (
identifier[buffer_name] = literal[string] ,
identifier[lexer] = identifier[lexer]
),
),
identifier[Window] (
identifier[TokenListControl] (
identifier[get_tutorial_tokens] ,
identifier[default_char] = identifier[Char] ( literal[string] , identifier[Token] . identifier[Toolbar] )),
identifier[height] = identifier[LayoutDimension] . identifier[exact] ( literal[int] )),
]),
identifier[filter] =~ identifier[IsDone] ()& identifier[RendererHeightIsKnown] ()
)
])
keyword[return] identifier[layout_full] | def create_tutorial_layout(self):
""" layout for example tutorial """
(lexer, _, _) = get_lexers(self.shell_ctx.lexer, None, None)
layout_full = HSplit([FloatContainer(Window(BufferControl(input_processors=self.input_processors, lexer=lexer, preview_search=Always()), get_height=get_height), [Float(xcursor=True, ycursor=True, content=CompletionsMenu(max_height=MAX_COMPLETION, scroll_offset=1, extra_filter=HasFocus(DEFAULT_BUFFER)))]), ConditionalContainer(HSplit([get_hline(), get_param(lexer), get_hline(), Window(content=BufferControl(buffer_name='example_line', lexer=lexer)), Window(TokenListControl(get_tutorial_tokens, default_char=Char(' ', Token.Toolbar)), height=LayoutDimension.exact(1))]), filter=~IsDone() & RendererHeightIsKnown())])
return layout_full |
def get_definition(query):
    """Return dictionary definitions for *query*.

    Delegates to the Wordnik-backed ``get_definition_api`` helper; any
    exception it raises propagates to the caller.

    NOTE(review): the previous body wrapped this call in a pointless
    ``try/except: raise`` and carried an unreachable direct-HTTP
    fallback after it (the ``try`` always returned or raised), and its
    docstring was copy-pasted from an unrelated wall-posts function.
    Both have been removed.
    """
    return get_definition_api(query)
constant[Returns dictionary of id, first names of people who posted on my wall
between start and end time]
<ast.Try object at 0x7da2041da110>
import module[json]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da2041db7c0>, <ast.Constant object at 0x7da2041d9f60>, <ast.Constant object at 0x7da2041db850>, <ast.Constant object at 0x7da2041d9870>, <ast.Constant object at 0x7da2041d8460>, <ast.Constant object at 0x7da2041d99f0>, <ast.Constant object at 0x7da2041dbc40>], [<ast.Name object at 0x7da2041dbaf0>, <ast.Constant object at 0x7da2041dab00>, <ast.Constant object at 0x7da2041d8250>, <ast.Constant object at 0x7da2041d8ac0>, <ast.Constant object at 0x7da2041d8c10>, <ast.Constant object at 0x7da2041d88b0>, <ast.Constant object at 0x7da2041d9150>]]
variable[url] assign[=] binary_operation[constant[http://api.wordnik.com:80/v4/word.json/%s/definitions] <ast.Mod object at 0x7da2590d6920> name[query]]
variable[r] assign[=] call[name[requests].get, parameter[name[url]]]
variable[result] assign[=] call[name[json].loads, parameter[name[r].text]]
return[name[result]] | keyword[def] identifier[get_definition] ( identifier[query] ):
literal[string]
keyword[try] :
keyword[return] identifier[get_definition_api] ( identifier[query] )
keyword[except] :
keyword[raise]
keyword[import] identifier[json]
identifier[payload] ={ literal[string] : identifier[query] , literal[string] : literal[int] , literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] : literal[string] , literal[string] : literal[string] ,
literal[string] : literal[string] }
identifier[url] = literal[string] % identifier[query]
identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[params] = identifier[payload] )
identifier[result] = identifier[json] . identifier[loads] ( identifier[r] . identifier[text] )
keyword[return] identifier[result] | def get_definition(query):
"""Returns dictionary of id, first names of people who posted on my wall
between start and end time"""
try:
return get_definition_api(query) # depends on [control=['try'], data=[]]
except:
raise # depends on [control=['except'], data=[]]
# http://api.wordnik.com:80/v4/word.json/discrimination/definitions?limit=200&includeRelated=true&sourceDictionaries=all&useCanonical=false&includeTags=false&api_key=a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5
import json
payload = {'q': query, 'limit': 200, 'includeRelated': 'true', 'sourceDictionaries': 'all', 'useCanonical': 'false', 'includeTags': 'false', 'api_key': 'a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5'}
url = 'http://api.wordnik.com:80/v4/word.json/%s/definitions' % query
r = requests.get(url, params=payload)
result = json.loads(r.text)
return result |
def validate(self, r):
    '''
    Called automatically by self.result.
    '''
    if self.show_invalid:
        # Invalid results are forced valid when requested.
        r.valid = True
    elif r.valid:
        file_size = r.file.size
        # A result is rejected when any sanity check fails.
        rejected = (
            not r.description
            or (r.size and r.size + r.offset > file_size)
            or (r.jump and r.jump + r.offset > file_size)
            or (hasattr(r, "location") and r.location != r.offset)
        )
        if rejected:
            r.valid = False

    if r.valid:
        # Don't keep displaying signatures that repeat a bunch of times
        # (e.g., JFFS2 nodes)
        if r.id == self.one_of_many:
            r.display = False
        elif r.many:
            self.one_of_many = r.id
        else:
            self.one_of_many = None
constant[
Called automatically by self.result.
]
if name[self].show_invalid begin[:]
name[r].valid assign[=] constant[True]
if name[r].valid begin[:]
if compare[name[r].id equal[==] name[self].one_of_many] begin[:]
name[r].display assign[=] constant[False] | keyword[def] identifier[validate] ( identifier[self] , identifier[r] ):
literal[string]
keyword[if] identifier[self] . identifier[show_invalid] :
identifier[r] . identifier[valid] = keyword[True]
keyword[elif] identifier[r] . identifier[valid] :
keyword[if] keyword[not] identifier[r] . identifier[description] :
identifier[r] . identifier[valid] = keyword[False]
keyword[if] identifier[r] . identifier[size] keyword[and] ( identifier[r] . identifier[size] + identifier[r] . identifier[offset] )> identifier[r] . identifier[file] . identifier[size] :
identifier[r] . identifier[valid] = keyword[False]
keyword[if] identifier[r] . identifier[jump] keyword[and] ( identifier[r] . identifier[jump] + identifier[r] . identifier[offset] )> identifier[r] . identifier[file] . identifier[size] :
identifier[r] . identifier[valid] = keyword[False]
keyword[if] identifier[hasattr] ( identifier[r] , literal[string] ) keyword[and] ( identifier[r] . identifier[location] != identifier[r] . identifier[offset] ):
identifier[r] . identifier[valid] = keyword[False]
keyword[if] identifier[r] . identifier[valid] :
keyword[if] identifier[r] . identifier[id] == identifier[self] . identifier[one_of_many] :
identifier[r] . identifier[display] = keyword[False]
keyword[elif] identifier[r] . identifier[many] :
identifier[self] . identifier[one_of_many] = identifier[r] . identifier[id]
keyword[else] :
identifier[self] . identifier[one_of_many] = keyword[None] | def validate(self, r):
"""
Called automatically by self.result.
"""
if self.show_invalid:
r.valid = True # depends on [control=['if'], data=[]]
elif r.valid:
if not r.description:
r.valid = False # depends on [control=['if'], data=[]]
if r.size and r.size + r.offset > r.file.size:
r.valid = False # depends on [control=['if'], data=[]]
if r.jump and r.jump + r.offset > r.file.size:
r.valid = False # depends on [control=['if'], data=[]]
if hasattr(r, 'location') and r.location != r.offset:
r.valid = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if r.valid:
# Don't keep displaying signatures that repeat a bunch of times
# (e.g., JFFS2 nodes)
if r.id == self.one_of_many:
r.display = False # depends on [control=['if'], data=[]]
elif r.many:
self.one_of_many = r.id # depends on [control=['if'], data=[]]
else:
self.one_of_many = None # depends on [control=['if'], data=[]] |
def log_loss(actual, predicted):
    """Negative log-likelihood of the true binary labels under *predicted*.

    Sums the per-entry log-likelihood over all entries and divides by the
    number of rows in ``actual``.

    Arguments:
        actual (array-like of float): true binary labels; rows are
            records, columns are categories.
        predicted (array-like of float): predicted probabilities with
            the same shape as ``actual``.

    Based On:
        https://www.kaggle.com/wiki/LogarithmicLoss
        http://scikit-learn.org/stable/modules/model_evaluation.html#log-loss
    """
    actual = np.array(actual)
    predicted = np.array(predicted)
    # Clamp probabilities away from 0 and 1 so the logs stay finite.
    tiny = 1e-15
    predicted = np.clip(predicted, tiny, 1. - tiny)
    likelihood = actual * np.log(predicted) + (1. - actual) * np.log(1. - predicted)
    return (-1. / len(actual)) * np.sum(likelihood)
constant[Log of the loss (error) summed over all entries
The negative of the logarithm of the frequency (probability) of the predicted
label given the true binary label for a category.
Arguments:
predicted (np.array of float): 2-D table of probabilities for each
category (columns) and each record (rows)
actual (np.array of float): True binary labels for each category
Should only have a single 1 on each row indicating the one
correct category (column)
Based On:
https://www.kaggle.com/wiki/LogarithmicLoss
http://scikit-learn.org/stable/modules/model_evaluation.html#log-loss
]
<ast.Tuple object at 0x7da20c7c8fa0> assign[=] tuple[[<ast.Call object at 0x7da20c7cb4c0>, <ast.Call object at 0x7da20c7cbe80>]]
variable[small_value] assign[=] constant[1e-15]
call[name[predicted]][compare[name[predicted] less[<] name[small_value]]] assign[=] name[small_value]
call[name[predicted]][compare[name[predicted] greater[>] binary_operation[constant[1] - name[small_value]]]] assign[=] binary_operation[constant[1.0] - name[small_value]]
return[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20e954e50> / call[name[len], parameter[name[actual]]]] * call[name[np].sum, parameter[binary_operation[binary_operation[name[actual] * call[name[np].log, parameter[name[predicted]]]] + binary_operation[binary_operation[constant[1.0] - name[actual]] * call[name[np].log, parameter[binary_operation[constant[1.0] - name[predicted]]]]]]]]]] | keyword[def] identifier[log_loss] ( identifier[actual] , identifier[predicted] ):
literal[string]
identifier[predicted] , identifier[actual] = identifier[np] . identifier[array] ( identifier[predicted] ), identifier[np] . identifier[array] ( identifier[actual] )
identifier[small_value] = literal[int]
identifier[predicted] [ identifier[predicted] < identifier[small_value] ]= identifier[small_value]
identifier[predicted] [ identifier[predicted] > literal[int] - identifier[small_value] ]= literal[int] - identifier[small_value]
keyword[return] (- literal[int] / identifier[len] ( identifier[actual] ))* identifier[np] . identifier[sum] (
identifier[actual] * identifier[np] . identifier[log] ( identifier[predicted] )+( literal[int] - identifier[actual] )* identifier[np] . identifier[log] ( literal[int] - identifier[predicted] )) | def log_loss(actual, predicted):
"""Log of the loss (error) summed over all entries
The negative of the logarithm of the frequency (probability) of the predicted
label given the true binary label for a category.
Arguments:
predicted (np.array of float): 2-D table of probabilities for each
category (columns) and each record (rows)
actual (np.array of float): True binary labels for each category
Should only have a single 1 on each row indicating the one
correct category (column)
Based On:
https://www.kaggle.com/wiki/LogarithmicLoss
http://scikit-learn.org/stable/modules/model_evaluation.html#log-loss
"""
(predicted, actual) = (np.array(predicted), np.array(actual))
small_value = 1e-15
predicted[predicted < small_value] = small_value
predicted[predicted > 1 - small_value] = 1.0 - small_value
return -1.0 / len(actual) * np.sum(actual * np.log(predicted) + (1.0 - actual) * np.log(1.0 - predicted)) |
def parse(self):
    """Main entrypoint into the parser. It interprets and creates all the
    relevant Lutron objects and stuffs them into the appropriate hierarchy."""
    import xml.etree.ElementTree as ET

    root = ET.fromstring(self._xml_db_str)
    # The structure is something like this:
    # <Areas>
    #   <Area ...>
    #     <DeviceGroups ...>
    #     <Scenes ...>
    #     <ShadeGroups ...>
    #     <Outputs ...>
    #     <Areas ...>
    #       <Area ...>

    # First area is useless, it's the top-level project area that defines the
    # "house". It contains the real nested Areas tree, which is the one we want.
    top_area = root.find('Areas').find('Area')
    self.project_name = top_area.get('Name')

    areas = top_area.find('Areas')
    # Element.getiterator() was deprecated since 2.7 and removed in
    # Python 3.9; Element.iter() is the drop-in replacement with the
    # same recursive-descendant semantics.
    for area_xml in areas.iter('Area'):
        area = self._parse_area(area_xml)
        self.areas.append(area)
    return True
constant[Main entrypoint into the parser. It interprets and creates all the
relevant Lutron objects and stuffs them into the appropriate hierarchy.]
import module[xml.etree.ElementTree] as alias[ET]
variable[root] assign[=] call[name[ET].fromstring, parameter[name[self]._xml_db_str]]
variable[top_area] assign[=] call[call[name[root].find, parameter[constant[Areas]]].find, parameter[constant[Area]]]
name[self].project_name assign[=] call[name[top_area].get, parameter[constant[Name]]]
variable[areas] assign[=] call[name[top_area].find, parameter[constant[Areas]]]
for taget[name[area_xml]] in starred[call[name[areas].getiterator, parameter[constant[Area]]]] begin[:]
variable[area] assign[=] call[name[self]._parse_area, parameter[name[area_xml]]]
call[name[self].areas.append, parameter[name[area]]]
return[constant[True]] | keyword[def] identifier[parse] ( identifier[self] ):
literal[string]
keyword[import] identifier[xml] . identifier[etree] . identifier[ElementTree] keyword[as] identifier[ET]
identifier[root] = identifier[ET] . identifier[fromstring] ( identifier[self] . identifier[_xml_db_str] )
identifier[top_area] = identifier[root] . identifier[find] ( literal[string] ). identifier[find] ( literal[string] )
identifier[self] . identifier[project_name] = identifier[top_area] . identifier[get] ( literal[string] )
identifier[areas] = identifier[top_area] . identifier[find] ( literal[string] )
keyword[for] identifier[area_xml] keyword[in] identifier[areas] . identifier[getiterator] ( literal[string] ):
identifier[area] = identifier[self] . identifier[_parse_area] ( identifier[area_xml] )
identifier[self] . identifier[areas] . identifier[append] ( identifier[area] )
keyword[return] keyword[True] | def parse(self):
"""Main entrypoint into the parser. It interprets and creates all the
relevant Lutron objects and stuffs them into the appropriate hierarchy."""
import xml.etree.ElementTree as ET
root = ET.fromstring(self._xml_db_str)
# The structure is something like this:
# <Areas>
# <Area ...>
# <DeviceGroups ...>
# <Scenes ...>
# <ShadeGroups ...>
# <Outputs ...>
# <Areas ...>
# <Area ...>
# First area is useless, it's the top-level project area that defines the
# "house". It contains the real nested Areas tree, which is the one we want.
top_area = root.find('Areas').find('Area')
self.project_name = top_area.get('Name')
areas = top_area.find('Areas')
for area_xml in areas.getiterator('Area'):
area = self._parse_area(area_xml)
self.areas.append(area) # depends on [control=['for'], data=['area_xml']]
return True |
def configure_searchfor(self, ns, definition):
    """
    Register a relation endpoint.

    The definition's func should be a search function, which must:
    - accept kwargs for the query string (minimally for pagination)
    - return a tuple of (items, count, context) where count is the total number of items
      available (in the case of pagination) and context is a dictionary providing any
      needed context variables for constructing pagination links

    The definition's request_schema will be used to process query string arguments.

    :param ns: the namespace
    :param definition: the endpoint definition
    """
    # Build — and instantiate exactly once — a schema describing a paginated
    # list whose items are validated by the relation's response schema.
    paginated_list_schema = self.page_cls.make_paginated_list_schema_class(
        ns.object_ns,
        definition.response_schema,
    )()
    # Route registration, query-string parsing, and response marshalling are
    # layered on via decorators; @wraps keeps the user function's metadata.
    @self.add_route(ns.relation_path, Operation.SearchFor, ns)
    @qs(definition.request_schema)
    @response(paginated_list_schema)
    @wraps(definition.func)
    def search(**path_data):
        # Pagination/query arguments come from the query string, processed
        # through the definition's request schema.
        page = self.page_cls.from_query_string(definition.request_schema)
        # Merge URI path variables with the query arguments and delegate to
        # the user-supplied search function.
        result = definition.func(**merge_data(path_data, page.to_dict(func=identity)))
        response_data, headers = page.to_paginated_list(result, ns, Operation.SearchFor)
        # Give the endpoint definition a chance to inject custom headers.
        definition.header_func(headers, response_data)
        # Content negotiation selects one of the definition's supported formats.
        response_format = self.negotiate_response_content(definition.response_formats)
        return dump_response_data(
            paginated_list_schema,
            response_data,
            headers=headers,
            response_format=response_format,
        )
search.__doc__ = "Search for {} relative to a {}".format(pluralize(ns.object_name), ns.subject_name) | def function[configure_searchfor, parameter[self, ns, definition]]:
constant[
Register a relation endpoint.
The definition's func should be a search function, which must:
- accept kwargs for the query string (minimally for pagination)
- return a tuple of (items, count, context) where count is the total number of items
available (in the case of pagination) and context is a dictionary providing any
needed context variables for constructing pagination links
The definition's request_schema will be used to process query string arguments.
:param ns: the namespace
:param definition: the endpoint definition
]
variable[paginated_list_schema] assign[=] call[call[name[self].page_cls.make_paginated_list_schema_class, parameter[name[ns].object_ns, name[definition].response_schema]], parameter[]]
def function[search, parameter[]]:
variable[page] assign[=] call[name[self].page_cls.from_query_string, parameter[name[definition].request_schema]]
variable[result] assign[=] call[name[definition].func, parameter[]]
<ast.Tuple object at 0x7da1b0c64f40> assign[=] call[name[page].to_paginated_list, parameter[name[result], name[ns], name[Operation].SearchFor]]
call[name[definition].header_func, parameter[name[headers], name[response_data]]]
variable[response_format] assign[=] call[name[self].negotiate_response_content, parameter[name[definition].response_formats]]
return[call[name[dump_response_data], parameter[name[paginated_list_schema], name[response_data]]]]
name[search].__doc__ assign[=] call[constant[Search for {} relative to a {}].format, parameter[call[name[pluralize], parameter[name[ns].object_name]], name[ns].subject_name]] | keyword[def] identifier[configure_searchfor] ( identifier[self] , identifier[ns] , identifier[definition] ):
literal[string]
identifier[paginated_list_schema] = identifier[self] . identifier[page_cls] . identifier[make_paginated_list_schema_class] (
identifier[ns] . identifier[object_ns] ,
identifier[definition] . identifier[response_schema] ,
)()
@ identifier[self] . identifier[add_route] ( identifier[ns] . identifier[relation_path] , identifier[Operation] . identifier[SearchFor] , identifier[ns] )
@ identifier[qs] ( identifier[definition] . identifier[request_schema] )
@ identifier[response] ( identifier[paginated_list_schema] )
@ identifier[wraps] ( identifier[definition] . identifier[func] )
keyword[def] identifier[search] (** identifier[path_data] ):
identifier[page] = identifier[self] . identifier[page_cls] . identifier[from_query_string] ( identifier[definition] . identifier[request_schema] )
identifier[result] = identifier[definition] . identifier[func] (** identifier[merge_data] ( identifier[path_data] , identifier[page] . identifier[to_dict] ( identifier[func] = identifier[identity] )))
identifier[response_data] , identifier[headers] = identifier[page] . identifier[to_paginated_list] ( identifier[result] , identifier[ns] , identifier[Operation] . identifier[SearchFor] )
identifier[definition] . identifier[header_func] ( identifier[headers] , identifier[response_data] )
identifier[response_format] = identifier[self] . identifier[negotiate_response_content] ( identifier[definition] . identifier[response_formats] )
keyword[return] identifier[dump_response_data] (
identifier[paginated_list_schema] ,
identifier[response_data] ,
identifier[headers] = identifier[headers] ,
identifier[response_format] = identifier[response_format] ,
)
identifier[search] . identifier[__doc__] = literal[string] . identifier[format] ( identifier[pluralize] ( identifier[ns] . identifier[object_name] ), identifier[ns] . identifier[subject_name] ) | def configure_searchfor(self, ns, definition):
"""
Register a relation endpoint.
The definition's func should be a search function, which must:
- accept kwargs for the query string (minimally for pagination)
- return a tuple of (items, count, context) where count is the total number of items
available (in the case of pagination) and context is a dictionary providing any
needed context variables for constructing pagination links
The definition's request_schema will be used to process query string arguments.
:param ns: the namespace
:param definition: the endpoint definition
"""
paginated_list_schema = self.page_cls.make_paginated_list_schema_class(ns.object_ns, definition.response_schema)()
@self.add_route(ns.relation_path, Operation.SearchFor, ns)
@qs(definition.request_schema)
@response(paginated_list_schema)
@wraps(definition.func)
def search(**path_data):
page = self.page_cls.from_query_string(definition.request_schema)
result = definition.func(**merge_data(path_data, page.to_dict(func=identity)))
(response_data, headers) = page.to_paginated_list(result, ns, Operation.SearchFor)
definition.header_func(headers, response_data)
response_format = self.negotiate_response_content(definition.response_formats)
return dump_response_data(paginated_list_schema, response_data, headers=headers, response_format=response_format)
search.__doc__ = 'Search for {} relative to a {}'.format(pluralize(ns.object_name), ns.subject_name) |
def get_all_build_configs_by_labels(self, label_selectors):
    """
    Returns all builds matching a given set of label selectors. It is up to the
    calling function to filter the results.
    """
    # Render the (field, value) pairs as a comma-separated label selector,
    # e.g. [("a", "b"), ("c", "d")] -> "a=b,c=d".
    selector = ','.join('%s=%s' % (name, val) for name, val in label_selectors)
    url = self._build_url("buildconfigs/", labelSelector=selector)
    return self._get(url).json()['items']
constant[
Returns all builds matching a given set of label selectors. It is up to the
calling function to filter the results.
]
variable[labels] assign[=] <ast.ListComp object at 0x7da1b0fdab90>
variable[labels] assign[=] call[constant[,].join, parameter[name[labels]]]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[buildconfigs/]]]
return[call[call[call[name[self]._get, parameter[name[url]]].json, parameter[]]][constant[items]]] | keyword[def] identifier[get_all_build_configs_by_labels] ( identifier[self] , identifier[label_selectors] ):
literal[string]
identifier[labels] =[ literal[string] %( identifier[field] , identifier[value] ) keyword[for] identifier[field] , identifier[value] keyword[in] identifier[label_selectors] ]
identifier[labels] = literal[string] . identifier[join] ( identifier[labels] )
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] , identifier[labelSelector] = identifier[labels] )
keyword[return] identifier[self] . identifier[_get] ( identifier[url] ). identifier[json] ()[ literal[string] ] | def get_all_build_configs_by_labels(self, label_selectors):
"""
Returns all builds matching a given set of label selectors. It is up to the
calling function to filter the results.
"""
labels = ['%s=%s' % (field, value) for (field, value) in label_selectors]
labels = ','.join(labels)
url = self._build_url('buildconfigs/', labelSelector=labels)
return self._get(url).json()['items'] |
def read_ipx(self, length):
    """Read Internetwork Packet Exchange.

    Parses the fixed 30-byte IPX header from the current read position,
    then hands the remaining payload to the next-layer decoder.

    Structure of IPX header [RFC 1132]:
    Octets      Bits        Name                    Description
      0           0     ipx.cksum               Checksum
      2          16     ipx.len                 Packet Length (header includes)
      4          32     ipx.count               Transport Control (hop count)
      5          40     ipx.type                Packet Type
      6          48     ipx.dst                 Destination Address
      18        144     ipx.src                 Source Address
    """
    if length is None:
        length = len(self)
    # NOTE(review): the `length` computed above (parameter or len(self)) is
    # unconditionally overwritten from the header's own length field below,
    # so this branch appears to be dead — confirm before removing.
    _csum = self._read_fileng(2)        # 2-byte checksum, read raw via _read_fileng
    _tlen = self._read_unpack(2)        # total packet length, header included
    _ctrl = self._read_unpack(1)        # transport control (hop count)
    _type = self._read_unpack(1)        # numeric packet-type code
    _dsta = self._read_ipx_address()    # destination address (octets 6-17)
    _srca = self._read_ipx_address()    # source address (octets 18-29)
    ipx = dict(
        chksum=_csum,
        len=_tlen,
        count=_ctrl,
        type=TYPE.get(_type),           # map the numeric code to its symbolic name
        dst=_dsta,
        src=_srca,
    )
    proto = ipx['type']
    # Payload length = declared total length minus the 30-byte IPX header.
    length = ipx['len'] - 30
    ipx['packet'] = self._read_packet(header=30, payload=length)
return self._decode_next_layer(ipx, proto, length) | def function[read_ipx, parameter[self, length]]:
constant[Read Internetwork Packet Exchange.
Structure of IPX header [RFC 1132]:
Octets Bits Name Description
0 0 ipx.cksum Checksum
2 16 ipx.len Packet Length (header includes)
4 32 ipx.count Transport Control (hop count)
5 40 ipx.type Packet Type
6 48 ipx.dst Destination Address
18 144 ipx.src Source Address
]
if compare[name[length] is constant[None]] begin[:]
variable[length] assign[=] call[name[len], parameter[name[self]]]
variable[_csum] assign[=] call[name[self]._read_fileng, parameter[constant[2]]]
variable[_tlen] assign[=] call[name[self]._read_unpack, parameter[constant[2]]]
variable[_ctrl] assign[=] call[name[self]._read_unpack, parameter[constant[1]]]
variable[_type] assign[=] call[name[self]._read_unpack, parameter[constant[1]]]
variable[_dsta] assign[=] call[name[self]._read_ipx_address, parameter[]]
variable[_srca] assign[=] call[name[self]._read_ipx_address, parameter[]]
variable[ipx] assign[=] call[name[dict], parameter[]]
variable[proto] assign[=] call[name[ipx]][constant[type]]
variable[length] assign[=] binary_operation[call[name[ipx]][constant[len]] - constant[30]]
call[name[ipx]][constant[packet]] assign[=] call[name[self]._read_packet, parameter[]]
return[call[name[self]._decode_next_layer, parameter[name[ipx], name[proto], name[length]]]] | keyword[def] identifier[read_ipx] ( identifier[self] , identifier[length] ):
literal[string]
keyword[if] identifier[length] keyword[is] keyword[None] :
identifier[length] = identifier[len] ( identifier[self] )
identifier[_csum] = identifier[self] . identifier[_read_fileng] ( literal[int] )
identifier[_tlen] = identifier[self] . identifier[_read_unpack] ( literal[int] )
identifier[_ctrl] = identifier[self] . identifier[_read_unpack] ( literal[int] )
identifier[_type] = identifier[self] . identifier[_read_unpack] ( literal[int] )
identifier[_dsta] = identifier[self] . identifier[_read_ipx_address] ()
identifier[_srca] = identifier[self] . identifier[_read_ipx_address] ()
identifier[ipx] = identifier[dict] (
identifier[chksum] = identifier[_csum] ,
identifier[len] = identifier[_tlen] ,
identifier[count] = identifier[_ctrl] ,
identifier[type] = identifier[TYPE] . identifier[get] ( identifier[_type] ),
identifier[dst] = identifier[_dsta] ,
identifier[src] = identifier[_srca] ,
)
identifier[proto] = identifier[ipx] [ literal[string] ]
identifier[length] = identifier[ipx] [ literal[string] ]- literal[int]
identifier[ipx] [ literal[string] ]= identifier[self] . identifier[_read_packet] ( identifier[header] = literal[int] , identifier[payload] = identifier[length] )
keyword[return] identifier[self] . identifier[_decode_next_layer] ( identifier[ipx] , identifier[proto] , identifier[length] ) | def read_ipx(self, length):
"""Read Internetwork Packet Exchange.
Structure of IPX header [RFC 1132]:
Octets Bits Name Description
0 0 ipx.cksum Checksum
2 16 ipx.len Packet Length (header includes)
4 32 ipx.count Transport Control (hop count)
5 40 ipx.type Packet Type
6 48 ipx.dst Destination Address
18 144 ipx.src Source Address
"""
if length is None:
length = len(self) # depends on [control=['if'], data=['length']]
_csum = self._read_fileng(2)
_tlen = self._read_unpack(2)
_ctrl = self._read_unpack(1)
_type = self._read_unpack(1)
_dsta = self._read_ipx_address()
_srca = self._read_ipx_address()
ipx = dict(chksum=_csum, len=_tlen, count=_ctrl, type=TYPE.get(_type), dst=_dsta, src=_srca)
proto = ipx['type']
length = ipx['len'] - 30
ipx['packet'] = self._read_packet(header=30, payload=length)
return self._decode_next_layer(ipx, proto, length) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.