body
stringlengths
26
98.2k
body_hash
int64
-9,222,864,604,528,158,000
9,221,803,474B
docstring
stringlengths
1
16.8k
path
stringlengths
5
230
name
stringlengths
1
96
repository_name
stringlengths
7
89
lang
stringclasses
1 value
body_without_docstring
stringlengths
20
98.2k
def teams_get_workgroups_v2(self, **kwargs):
    """Get all available Workgroups.

    Operation to get IDs and names for all available Workgroups.

    The request is synchronous by default; pass ``async_req=True`` to
    receive a request thread instead of the deserialized response.

    >>> thread = api.teams_get_workgroups_v2(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request; a single number is
                             the total timeout, a (connection, read) tuple
                             sets each separately.
    :return: TeamsV2Response, or the request thread when called
             asynchronously.
    """
    # This convenience wrapper returns only the deserialized data, so force
    # the flag before delegating to the full-info variant.
    kwargs.update(_return_http_data_only=True)
    return self.teams_get_workgroups_v2_with_http_info(**kwargs)
-4,514,242,222,972,709,000
Get all available Workgroups # noqa: E501 Operation to get IDs and names for all available Workgroups. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_get_workgroups_v2(async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: TeamsV2Response If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_get_workgroups_v2
greenpau/pycherwell
python
def teams_get_workgroups_v2(self, **kwargs):
    """Get all available Workgroups.

    Operation to get IDs and names for all available Workgroups.

    The request is synchronous by default; pass ``async_req=True`` to
    receive a request thread instead of the deserialized response.

    >>> thread = api.teams_get_workgroups_v2(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request; a single number is
                             the total timeout, a (connection, read) tuple
                             sets each separately.
    :return: TeamsV2Response, or the request thread when called
             asynchronously.
    """
    # This convenience wrapper returns only the deserialized data, so force
    # the flag before delegating to the full-info variant.
    kwargs.update(_return_http_data_only=True)
    return self.teams_get_workgroups_v2_with_http_info(**kwargs)
def teams_get_workgroups_v2_with_http_info(self, **kwargs):
    """Get all available Workgroups  # noqa: E501

    Operation to get IDs and names for all available Workgroups.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.teams_get_workgroups_v2_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(TeamsV2Response, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot this call's locals (`self` and `kwargs`) so the validated
    # keyword arguments can be merged into one flat dict below.
    local_var_params = locals()

    # Only these framework-level keyword arguments are accepted; the
    # operation itself takes no parameters.
    all_params = []
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params and drop the nested 'kwargs' entry.
    for (key, val) in six.iteritems(local_var_params['kwargs']):
        if (key not in all_params):
            raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_get_workgroups_v2" % key))
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    # GET /api/V2/getworkgroups takes no path/query/form parameters and no
    # request body.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # The response is requested as JSON.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])

    # No authentication settings are applied for this operation.
    auth_settings = []

    return self.api_client.call_api(
        '/api/V2/getworkgroups', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TeamsV2Response',  # model class used for deserialization
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
695,146,712,052,608,400
Get all available Workgroups # noqa: E501 Operation to get IDs and names for all available Workgroups. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_get_workgroups_v2_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(TeamsV2Response, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_get_workgroups_v2_with_http_info
greenpau/pycherwell
python
def teams_get_workgroups_v2_with_http_info(self, **kwargs):
    """Get all available Workgroups  # noqa: E501

    Operation to get IDs and names for all available Workgroups.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.teams_get_workgroups_v2_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(TeamsV2Response, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot this call's locals (`self` and `kwargs`) so the validated
    # keyword arguments can be merged into one flat dict below.
    local_var_params = locals()

    # Only these framework-level keyword arguments are accepted; the
    # operation itself takes no parameters.
    all_params = []
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params and drop the nested 'kwargs' entry.
    for (key, val) in six.iteritems(local_var_params['kwargs']):
        if (key not in all_params):
            raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_get_workgroups_v2" % key))
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    # GET /api/V2/getworkgroups takes no path/query/form parameters and no
    # request body.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # The response is requested as JSON.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])

    # No authentication settings are applied for this operation.
    auth_settings = []

    return self.api_client.call_api(
        '/api/V2/getworkgroups', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TeamsV2Response',  # model class used for deserialization
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def teams_remove_customer_from_workgroup_v1(self, workgroupid, customerrecordid, **kwargs):
    """Remove a customer from a Workgroup.

    Operation to remove a Customer from a Workgroup. To remove, specify the
    Workgroup ID and the Customer Record ID.

    The request is synchronous by default; pass ``async_req=True`` to
    receive a request thread instead of the deserialized response.

    >>> thread = api.teams_remove_customer_from_workgroup_v1(workgroupid, customerrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str workgroupid: Specify the Workgroup ID. (required)
    :param str customerrecordid: Specify the Customer record ID. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request; a single number is
                             the total timeout, a (connection, read) tuple
                             sets each separately.
    :return: RemoveCustomerFromWorkgroupResponse, or the request thread when
             called asynchronously.
    """
    # This convenience wrapper returns only the deserialized data, so force
    # the flag before delegating to the full-info variant.
    kwargs.update(_return_http_data_only=True)
    return self.teams_remove_customer_from_workgroup_v1_with_http_info(workgroupid, customerrecordid, **kwargs)
-605,956,643,830,825,500
Remove a customer from a Workgroup # noqa: E501 Operation to remove a Customer from a Workgroup. To remove, specify the Workgroup ID and the Customer Record ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_remove_customer_from_workgroup_v1(workgroupid, customerrecordid, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str workgroupid: Specify the Workgroup ID. (required) :param str customerrecordid: Specify the Customer record ID. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: RemoveCustomerFromWorkgroupResponse If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_remove_customer_from_workgroup_v1
greenpau/pycherwell
python
def teams_remove_customer_from_workgroup_v1(self, workgroupid, customerrecordid, **kwargs):
    """Remove a customer from a Workgroup.

    Operation to remove a Customer from a Workgroup. To remove, specify the
    Workgroup ID and the Customer Record ID.

    The request is synchronous by default; pass ``async_req=True`` to
    receive a request thread instead of the deserialized response.

    >>> thread = api.teams_remove_customer_from_workgroup_v1(workgroupid, customerrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str workgroupid: Specify the Workgroup ID. (required)
    :param str customerrecordid: Specify the Customer record ID. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request; a single number is
                             the total timeout, a (connection, read) tuple
                             sets each separately.
    :return: RemoveCustomerFromWorkgroupResponse, or the request thread when
             called asynchronously.
    """
    # This convenience wrapper returns only the deserialized data, so force
    # the flag before delegating to the full-info variant.
    kwargs.update(_return_http_data_only=True)
    return self.teams_remove_customer_from_workgroup_v1_with_http_info(workgroupid, customerrecordid, **kwargs)
def teams_remove_customer_from_workgroup_v1_with_http_info(self, workgroupid, customerrecordid, **kwargs):
    """Remove a customer from a Workgroup  # noqa: E501

    Operation to remove a Customer from a Workgroup. To remove, specify the Workgroup ID and the Customer Record ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.teams_remove_customer_from_workgroup_v1_with_http_info(workgroupid, customerrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str workgroupid: Specify the Workgroup ID. (required)
    :param str customerrecordid: Specify the Customer record ID. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(RemoveCustomerFromWorkgroupResponse, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot this call's locals so positional parameters and the validated
    # keyword arguments end up in one flat dict.
    local_var_params = locals()

    # Accepted parameter names: the two operation parameters plus the
    # framework-level keyword arguments.
    all_params = ['workgroupid', 'customerrecordid']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params and drop the nested 'kwargs' entry.
    for (key, val) in six.iteritems(local_var_params['kwargs']):
        if (key not in all_params):
            raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_remove_customer_from_workgroup_v1" % key))
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation: both path parameters are required and must not
    # be None.
    if (self.api_client.client_side_validation and (('workgroupid' not in local_var_params) or (local_var_params['workgroupid'] is None))):
        raise ApiValueError('Missing the required parameter `workgroupid` when calling `teams_remove_customer_from_workgroup_v1`')
    if (self.api_client.client_side_validation and (('customerrecordid' not in local_var_params) or (local_var_params['customerrecordid'] is None))):
        raise ApiValueError('Missing the required parameter `customerrecordid` when calling `teams_remove_customer_from_workgroup_v1`')

    collection_formats = {}

    # Substitute both values into the URL template below.
    path_params = {}
    if ('workgroupid' in local_var_params):
        path_params['workgroupid'] = local_var_params['workgroupid']
    if ('customerrecordid' in local_var_params):
        path_params['customerrecordid'] = local_var_params['customerrecordid']

    # No query/form parameters and no request body for this DELETE.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # The response is requested as JSON.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])

    # No authentication settings are applied for this operation.
    auth_settings = []

    return self.api_client.call_api(
        '/api/V1/removecustomerfromworkgroup/workgroupid/{workgroupid}/customerrecordid/{customerrecordid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RemoveCustomerFromWorkgroupResponse',  # model class used for deserialization
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
2,223,508,028,111,744,300
Remove a customer from a Workgroup # noqa: E501 Operation to remove a Customer from a Workgroup. To remove, specify the Workgroup ID and the Customer Record ID. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_remove_customer_from_workgroup_v1_with_http_info(workgroupid, customerrecordid, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str workgroupid: Specify the Workgroup ID. (required) :param str customerrecordid: Specify the Customer record ID. (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(RemoveCustomerFromWorkgroupResponse, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_remove_customer_from_workgroup_v1_with_http_info
greenpau/pycherwell
python
def teams_remove_customer_from_workgroup_v1_with_http_info(self, workgroupid, customerrecordid, **kwargs):
    """Remove a customer from a Workgroup  # noqa: E501

    Operation to remove a Customer from a Workgroup. To remove, specify the Workgroup ID and the Customer Record ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.teams_remove_customer_from_workgroup_v1_with_http_info(workgroupid, customerrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str workgroupid: Specify the Workgroup ID. (required)
    :param str customerrecordid: Specify the Customer record ID. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(RemoveCustomerFromWorkgroupResponse, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot this call's locals so positional parameters and the validated
    # keyword arguments end up in one flat dict.
    local_var_params = locals()

    # Accepted parameter names: the two operation parameters plus the
    # framework-level keyword arguments.
    all_params = ['workgroupid', 'customerrecordid']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params and drop the nested 'kwargs' entry.
    for (key, val) in six.iteritems(local_var_params['kwargs']):
        if (key not in all_params):
            raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_remove_customer_from_workgroup_v1" % key))
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation: both path parameters are required and must not
    # be None.
    if (self.api_client.client_side_validation and (('workgroupid' not in local_var_params) or (local_var_params['workgroupid'] is None))):
        raise ApiValueError('Missing the required parameter `workgroupid` when calling `teams_remove_customer_from_workgroup_v1`')
    if (self.api_client.client_side_validation and (('customerrecordid' not in local_var_params) or (local_var_params['customerrecordid'] is None))):
        raise ApiValueError('Missing the required parameter `customerrecordid` when calling `teams_remove_customer_from_workgroup_v1`')

    collection_formats = {}

    # Substitute both values into the URL template below.
    path_params = {}
    if ('workgroupid' in local_var_params):
        path_params['workgroupid'] = local_var_params['workgroupid']
    if ('customerrecordid' in local_var_params):
        path_params['customerrecordid'] = local_var_params['customerrecordid']

    # No query/form parameters and no request body for this DELETE.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # The response is requested as JSON.
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])

    # No authentication settings are applied for this operation.
    auth_settings = []

    return self.api_client.call_api(
        '/api/V1/removecustomerfromworkgroup/workgroupid/{workgroupid}/customerrecordid/{customerrecordid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='RemoveCustomerFromWorkgroupResponse',  # model class used for deserialization
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def teams_remove_user_from_team_v1(self, team_id, userrecordid, **kwargs):
    """Operation to remove a User from a Team.

    Operation to remove a User from a Team. To get the User's record ID,
    use "Get a User by login ID" or "Get a User by public ID." To get a
    Team's internal ID, use "Get all available Teams."

    The request is synchronous by default; pass ``async_req=True`` to
    receive a request thread instead of the deserialized response.

    >>> thread = api.teams_remove_user_from_team_v1(team_id, userrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str team_id: Specify the internal ID of the Team. (required)
    :param str userrecordid: Specify the record ID of the User to remove. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request; a single number is
                             the total timeout, a (connection, read) tuple
                             sets each separately.
    :return: None, or the request thread when called asynchronously.
    """
    # This convenience wrapper returns only the deserialized data, so force
    # the flag before delegating to the full-info variant.
    kwargs.update(_return_http_data_only=True)
    return self.teams_remove_user_from_team_v1_with_http_info(team_id, userrecordid, **kwargs)
8,563,310,307,737,447,000
Operation to remove a User from a Team. # noqa: E501 Operation to remove a User from a Team. To get the User's record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team's internal ID, use "Get all available Teams." # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_remove_user_from_team_v1(team_id, userrecordid, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str team_id: Specify the internal ID of the Team. (required) :param str userrecordid: Specify the record ID of the User to remove. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: None If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_remove_user_from_team_v1
greenpau/pycherwell
python
def teams_remove_user_from_team_v1(self, team_id, userrecordid, **kwargs):
    """Operation to remove a User from a Team.

    Operation to remove a User from a Team. To get the User's record ID,
    use "Get a User by login ID" or "Get a User by public ID." To get a
    Team's internal ID, use "Get all available Teams."

    The request is synchronous by default; pass ``async_req=True`` to
    receive a request thread instead of the deserialized response.

    >>> thread = api.teams_remove_user_from_team_v1(team_id, userrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str team_id: Specify the internal ID of the Team. (required)
    :param str userrecordid: Specify the record ID of the User to remove. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request; a single number is
                             the total timeout, a (connection, read) tuple
                             sets each separately.
    :return: None, or the request thread when called asynchronously.
    """
    # This convenience wrapper returns only the deserialized data, so force
    # the flag before delegating to the full-info variant.
    kwargs.update(_return_http_data_only=True)
    return self.teams_remove_user_from_team_v1_with_http_info(team_id, userrecordid, **kwargs)
def teams_remove_user_from_team_v1_with_http_info(self, team_id, userrecordid, **kwargs):
    """Operation to remove a User from a Team.  # noqa: E501

    Operation to remove a User from a Team. To get the User's record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team's internal ID, use "Get all available Teams."  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.teams_remove_user_from_team_v1_with_http_info(team_id, userrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str team_id: Specify the internal ID of the Team. (required)
    :param str userrecordid: Specify the record ID of the User to remove. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot this call's locals so positional parameters and the validated
    # keyword arguments end up in one flat dict.
    local_var_params = locals()

    # Accepted parameter names: the two operation parameters plus the
    # framework-level keyword arguments.
    all_params = ['team_id', 'userrecordid']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params and drop the nested 'kwargs' entry.
    for (key, val) in six.iteritems(local_var_params['kwargs']):
        if (key not in all_params):
            raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_remove_user_from_team_v1" % key))
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation: both path parameters are required and must not
    # be None.
    if (self.api_client.client_side_validation and (('team_id' not in local_var_params) or (local_var_params['team_id'] is None))):
        raise ApiValueError('Missing the required parameter `team_id` when calling `teams_remove_user_from_team_v1`')
    if (self.api_client.client_side_validation and (('userrecordid' not in local_var_params) or (local_var_params['userrecordid'] is None))):
        raise ApiValueError('Missing the required parameter `userrecordid` when calling `teams_remove_user_from_team_v1`')

    collection_formats = {}

    # Note the Python parameter `team_id` maps to the `teamId` placeholder
    # in the URL template below.
    path_params = {}
    if ('team_id' in local_var_params):
        path_params['teamId'] = local_var_params['team_id']
    if ('userrecordid' in local_var_params):
        path_params['userrecordid'] = local_var_params['userrecordid']

    # No query/form parameters and no request body for this DELETE. Unlike
    # the other operations in this file, no Accept header is set and
    # response_type is None: this endpoint returns no response body.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # No authentication settings are applied for this operation.
    auth_settings = []

    return self.api_client.call_api(
        '/api/V1/removeuserfromteam/teamid/{teamId}/userrecordid/{userrecordid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # no body to deserialize
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
-6,288,197,175,699,464,000
Operation to remove a User from a Team. # noqa: E501 Operation to remove a User from a Team. To get the User's record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team's internal ID, use "Get all available Teams." # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_remove_user_from_team_v1_with_http_info(team_id, userrecordid, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str team_id: Specify the internal ID of the Team. (required) :param str userrecordid: Specify the record ID of the User to remove. (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: None If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_remove_user_from_team_v1_with_http_info
greenpau/pycherwell
python
def teams_remove_user_from_team_v1_with_http_info(self, team_id, userrecordid, **kwargs):
    """Operation to remove a User from a Team.  # noqa: E501

    Operation to remove a User from a Team. To get the User's record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team's internal ID, use "Get all available Teams."  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.teams_remove_user_from_team_v1_with_http_info(team_id, userrecordid, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str team_id: Specify the internal ID of the Team. (required)
    :param str userrecordid: Specify the record ID of the User to remove. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot this call's locals so positional parameters and the validated
    # keyword arguments end up in one flat dict.
    local_var_params = locals()

    # Accepted parameter names: the two operation parameters plus the
    # framework-level keyword arguments.
    all_params = ['team_id', 'userrecordid']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones into
    # local_var_params and drop the nested 'kwargs' entry.
    for (key, val) in six.iteritems(local_var_params['kwargs']):
        if (key not in all_params):
            raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_remove_user_from_team_v1" % key))
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation: both path parameters are required and must not
    # be None.
    if (self.api_client.client_side_validation and (('team_id' not in local_var_params) or (local_var_params['team_id'] is None))):
        raise ApiValueError('Missing the required parameter `team_id` when calling `teams_remove_user_from_team_v1`')
    if (self.api_client.client_side_validation and (('userrecordid' not in local_var_params) or (local_var_params['userrecordid'] is None))):
        raise ApiValueError('Missing the required parameter `userrecordid` when calling `teams_remove_user_from_team_v1`')

    collection_formats = {}

    # Note the Python parameter `team_id` maps to the `teamId` placeholder
    # in the URL template below.
    path_params = {}
    if ('team_id' in local_var_params):
        path_params['teamId'] = local_var_params['team_id']
    if ('userrecordid' in local_var_params):
        path_params['userrecordid'] = local_var_params['userrecordid']

    # No query/form parameters and no request body for this DELETE. Unlike
    # the other operations in this file, no Accept header is set and
    # response_type is None: this endpoint returns no response body.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # No authentication settings are applied for this operation.
    auth_settings = []

    return self.api_client.call_api(
        '/api/V1/removeuserfromteam/teamid/{teamId}/userrecordid/{userrecordid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # no body to deserialize
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def teams_remove_user_from_team_v2(self, team_id, userrecordid, **kwargs): 'Operation to remove a User from a Team. # noqa: E501\n\n Operation to remove a User from a Team. To get the User\'s record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team\'s internal ID, use "Get all available Teams." # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_remove_user_from_team_v2(team_id, userrecordid, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param str team_id: Specify the internal ID of the Team. (required)\n :param str userrecordid: Specify the record ID of the User to remove. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: RemoveUserFromTeamResponse\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True return self.teams_remove_user_from_team_v2_with_http_info(team_id, userrecordid, **kwargs)
3,164,772,470,016,654,000
Operation to remove a User from a Team. # noqa: E501 Operation to remove a User from a Team. To get the User's record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team's internal ID, use "Get all available Teams." # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_remove_user_from_team_v2(team_id, userrecordid, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str team_id: Specify the internal ID of the Team. (required) :param str userrecordid: Specify the record ID of the User to remove. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: RemoveUserFromTeamResponse If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_remove_user_from_team_v2
greenpau/pycherwell
python
def teams_remove_user_from_team_v2(self, team_id, userrecordid, **kwargs): 'Operation to remove a User from a Team. # noqa: E501\n\n Operation to remove a User from a Team. To get the User\'s record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team\'s internal ID, use "Get all available Teams." # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_remove_user_from_team_v2(team_id, userrecordid, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param str team_id: Specify the internal ID of the Team. (required)\n :param str userrecordid: Specify the record ID of the User to remove. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: RemoveUserFromTeamResponse\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True return self.teams_remove_user_from_team_v2_with_http_info(team_id, userrecordid, **kwargs)
def teams_remove_user_from_team_v2_with_http_info(self, team_id, userrecordid, **kwargs): 'Operation to remove a User from a Team. # noqa: E501\n\n Operation to remove a User from a Team. To get the User\'s record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team\'s internal ID, use "Get all available Teams." # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_remove_user_from_team_v2_with_http_info(team_id, userrecordid, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param str team_id: Specify the internal ID of the Team. (required)\n :param str userrecordid: Specify the record ID of the User to remove. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(RemoveUserFromTeamResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n ' local_var_params = locals() all_params = ['team_id', 'userrecordid'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_remove_user_from_team_v2" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('team_id' not in local_var_params) or (local_var_params['team_id'] is None))): raise ApiValueError('Missing the required parameter `team_id` when calling `teams_remove_user_from_team_v2`') if (self.api_client.client_side_validation and (('userrecordid' not in local_var_params) or (local_var_params['userrecordid'] is None))): raise ApiValueError('Missing the required parameter `userrecordid` when calling `teams_remove_user_from_team_v2`') collection_formats = {} path_params = {} if ('team_id' in local_var_params): path_params['teamId'] = local_var_params['team_id'] if ('userrecordid' in local_var_params): path_params['userrecordid'] = local_var_params['userrecordid'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V2/removeuserfromteam/teamid/{teamId}/userrecordid/{userrecordid}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RemoveUserFromTeamResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), 
_return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
-5,760,425,905,116,669,000
Operation to remove a User from a Team. # noqa: E501 Operation to remove a User from a Team. To get the User's record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team's internal ID, use "Get all available Teams." # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_remove_user_from_team_v2_with_http_info(team_id, userrecordid, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param str team_id: Specify the internal ID of the Team. (required) :param str userrecordid: Specify the record ID of the User to remove. (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(RemoveUserFromTeamResponse, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_remove_user_from_team_v2_with_http_info
greenpau/pycherwell
python
def teams_remove_user_from_team_v2_with_http_info(self, team_id, userrecordid, **kwargs): 'Operation to remove a User from a Team. # noqa: E501\n\n Operation to remove a User from a Team. To get the User\'s record ID, use "Get a User by login ID" or "Get a User by public ID." To get a Team\'s internal ID, use "Get all available Teams." # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_remove_user_from_team_v2_with_http_info(team_id, userrecordid, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param str team_id: Specify the internal ID of the Team. (required)\n :param str userrecordid: Specify the record ID of the User to remove. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(RemoveUserFromTeamResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n ' local_var_params = locals() all_params = ['team_id', 'userrecordid'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_remove_user_from_team_v2" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('team_id' not in local_var_params) or (local_var_params['team_id'] is None))): raise ApiValueError('Missing the required parameter `team_id` when calling `teams_remove_user_from_team_v2`') if (self.api_client.client_side_validation and (('userrecordid' not in local_var_params) or (local_var_params['userrecordid'] is None))): raise ApiValueError('Missing the required parameter `userrecordid` when calling `teams_remove_user_from_team_v2`') collection_formats = {} path_params = {} if ('team_id' in local_var_params): path_params['teamId'] = local_var_params['team_id'] if ('userrecordid' in local_var_params): path_params['userrecordid'] = local_var_params['userrecordid'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V2/removeuserfromteam/teamid/{teamId}/userrecordid/{userrecordid}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='RemoveUserFromTeamResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), 
_return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
def teams_save_team_member_v1(self, save_team_member_request, **kwargs): "Add or Update a team member # noqa: E501\n\n Operation to add or update a Team Member. To add or update, specify User ID, Team ID, and if Team Manager. Optionally, set the Team as the User's default Team. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_member_v1(save_team_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveTeamMemberRequest save_team_member_request: The request object to add or update a Team Member. User recID specifies the User to add or update. TeamId specifies the Team to update. IsTeamManager specifies whether the User is a Team Manager, and SetAsDefaultTeam specifies whether to set this Team as the User's default team. UserRecId, TeamId, and IsTeamManager are required. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: SaveTeamMemberResponse\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True return self.teams_save_team_member_v1_with_http_info(save_team_member_request, **kwargs)
-4,464,309,968,757,101,000
Add or Update a team member # noqa: E501 Operation to add or update a Team Member. To add or update, specify User ID, Team ID, and if Team Manager. Optionally, set the Team as the User's default Team. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_save_team_member_v1(save_team_member_request, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param SaveTeamMemberRequest save_team_member_request: The request object to add or update a Team Member. User recID specifies the User to add or update. TeamId specifies the Team to update. IsTeamManager specifies whether the User is a Team Manager, and SetAsDefaultTeam specifies whether to set this Team as the User's default team. UserRecId, TeamId, and IsTeamManager are required. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: SaveTeamMemberResponse If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_save_team_member_v1
greenpau/pycherwell
python
def teams_save_team_member_v1(self, save_team_member_request, **kwargs): "Add or Update a team member # noqa: E501\n\n Operation to add or update a Team Member. To add or update, specify User ID, Team ID, and if Team Manager. Optionally, set the Team as the User's default Team. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_member_v1(save_team_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveTeamMemberRequest save_team_member_request: The request object to add or update a Team Member. User recID specifies the User to add or update. TeamId specifies the Team to update. IsTeamManager specifies whether the User is a Team Manager, and SetAsDefaultTeam specifies whether to set this Team as the User's default team. UserRecId, TeamId, and IsTeamManager are required. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: SaveTeamMemberResponse\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True return self.teams_save_team_member_v1_with_http_info(save_team_member_request, **kwargs)
def teams_save_team_member_v1_with_http_info(self, save_team_member_request, **kwargs): "Add or Update a team member # noqa: E501\n\n Operation to add or update a Team Member. To add or update, specify User ID, Team ID, and if Team Manager. Optionally, set the Team as the User's default Team. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_member_v1_with_http_info(save_team_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveTeamMemberRequest save_team_member_request: The request object to add or update a Team Member. User recID specifies the User to add or update. TeamId specifies the Team to update. IsTeamManager specifies whether the User is a Team Manager, and SetAsDefaultTeam specifies whether to set this Team as the User's default team. UserRecId, TeamId, and IsTeamManager are required. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(SaveTeamMemberResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n " local_var_params = locals() all_params = ['save_team_member_request'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_save_team_member_v1" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('save_team_member_request' not in local_var_params) or (local_var_params['save_team_member_request'] is None))): raise ApiValueError('Missing the required parameter `save_team_member_request` when calling `teams_save_team_member_v1`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('save_team_member_request' in local_var_params): body_params = local_var_params['save_team_member_request'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V1/saveteammember', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SaveTeamMemberResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
-5,117,426,151,271,859,000
Add or Update a team member # noqa: E501 Operation to add or update a Team Member. To add or update, specify User ID, Team ID, and if Team Manager. Optionally, set the Team as the User's default Team. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_save_team_member_v1_with_http_info(save_team_member_request, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param SaveTeamMemberRequest save_team_member_request: The request object to add or update a Team Member. User recID specifies the User to add or update. TeamId specifies the Team to update. IsTeamManager specifies whether the User is a Team Manager, and SetAsDefaultTeam specifies whether to set this Team as the User's default team. UserRecId, TeamId, and IsTeamManager are required. (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(SaveTeamMemberResponse, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_save_team_member_v1_with_http_info
greenpau/pycherwell
python
def teams_save_team_member_v1_with_http_info(self, save_team_member_request, **kwargs): "Add or Update a team member # noqa: E501\n\n Operation to add or update a Team Member. To add or update, specify User ID, Team ID, and if Team Manager. Optionally, set the Team as the User's default Team. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_member_v1_with_http_info(save_team_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveTeamMemberRequest save_team_member_request: The request object to add or update a Team Member. User recID specifies the User to add or update. TeamId specifies the Team to update. IsTeamManager specifies whether the User is a Team Manager, and SetAsDefaultTeam specifies whether to set this Team as the User's default team. UserRecId, TeamId, and IsTeamManager are required. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(SaveTeamMemberResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n " local_var_params = locals() all_params = ['save_team_member_request'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_save_team_member_v1" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('save_team_member_request' not in local_var_params) or (local_var_params['save_team_member_request'] is None))): raise ApiValueError('Missing the required parameter `save_team_member_request` when calling `teams_save_team_member_v1`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('save_team_member_request' in local_var_params): body_params = local_var_params['save_team_member_request'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V1/saveteammember', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SaveTeamMemberResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
def teams_save_team_v1(self, team_save_request, **kwargs): 'Create or update a team # noqa: E501\n\n Operation to create or update a Team or Workgroup. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_v1(team_save_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param TeamSaveRequest team_save_request: Request object to create Teams or Workgroups. To create a Team, use teamType and teamName. To update a team, use teamID. Team type values must be User or CustomerWorkgroup. The teamType cannot be changed for existing Teams or Workgroups. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: TeamSaveResponse\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True return self.teams_save_team_v1_with_http_info(team_save_request, **kwargs)
-772,596,025,537,764,400
Create or update a team # noqa: E501 Operation to create or update a Team or Workgroup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_save_team_v1(team_save_request, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param TeamSaveRequest team_save_request: Request object to create Teams or Workgroups. To create a Team, use teamType and teamName. To update a team, use teamID. Team type values must be User or CustomerWorkgroup. The teamType cannot be changed for existing Teams or Workgroups. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: TeamSaveResponse If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_save_team_v1
greenpau/pycherwell
python
def teams_save_team_v1(self, team_save_request, **kwargs): 'Create or update a team # noqa: E501\n\n Operation to create or update a Team or Workgroup. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_v1(team_save_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param TeamSaveRequest team_save_request: Request object to create Teams or Workgroups. To create a Team, use teamType and teamName. To update a team, use teamID. Team type values must be User or CustomerWorkgroup. The teamType cannot be changed for existing Teams or Workgroups. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: TeamSaveResponse\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True return self.teams_save_team_v1_with_http_info(team_save_request, **kwargs)
def teams_save_team_v1_with_http_info(self, team_save_request, **kwargs): 'Create or update a team # noqa: E501\n\n Operation to create or update a Team or Workgroup. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_v1_with_http_info(team_save_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param TeamSaveRequest team_save_request: Request object to create Teams or Workgroups. To create a Team, use teamType and teamName. To update a team, use teamID. Team type values must be User or CustomerWorkgroup. The teamType cannot be changed for existing Teams or Workgroups. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(TeamSaveResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n ' local_var_params = locals() all_params = ['team_save_request'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_save_team_v1" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('team_save_request' not in local_var_params) or (local_var_params['team_save_request'] is None))): raise ApiValueError('Missing the required parameter `team_save_request` when calling `teams_save_team_v1`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('team_save_request' in local_var_params): body_params = local_var_params['team_save_request'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V1/saveteam', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TeamSaveResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
-6,858,725,893,052,250,000
Create or update a team # noqa: E501 Operation to create or update a Team or Workgroup. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_save_team_v1_with_http_info(team_save_request, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param TeamSaveRequest team_save_request: Request object to create Teams or Workgroups. To create a Team, use teamType and teamName. To update a team, use teamID. Team type values must be User or CustomerWorkgroup. The teamType cannot be changed for existing Teams or Workgroups. (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(TeamSaveResponse, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_save_team_v1_with_http_info
greenpau/pycherwell
python
def teams_save_team_v1_with_http_info(self, team_save_request, **kwargs): 'Create or update a team # noqa: E501\n\n Operation to create or update a Team or Workgroup. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_team_v1_with_http_info(team_save_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param TeamSaveRequest team_save_request: Request object to create Teams or Workgroups. To create a Team, use teamType and teamName. To update a team, use teamID. Team type values must be User or CustomerWorkgroup. The teamType cannot be changed for existing Teams or Workgroups. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(TeamSaveResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n ' local_var_params = locals() all_params = ['team_save_request'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_save_team_v1" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('team_save_request' not in local_var_params) or (local_var_params['team_save_request'] is None))): raise ApiValueError('Missing the required parameter `team_save_request` when calling `teams_save_team_v1`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('team_save_request' in local_var_params): body_params = local_var_params['team_save_request'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V1/saveteam', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='TeamSaveResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
def teams_save_workgroup_member_v1(self, save_workgroup_member_request, **kwargs): 'Save the membership status of a Workgroup member. # noqa: E501\n\n Operation to add or update a Workgroup Member. To add or update, specify Customer Record ID, Workgroup ID, and if Workgroup Manager. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_workgroup_member_v1(save_workgroup_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveWorkgroupMemberRequest save_workgroup_member_request: The request object to add or update a Workgroup Member. CustomerRecordId specifies the Customer to add or update. WorkgroupId specifies the Workgroup to update. CustomerIsWorkgroupManager specifies whether the Customer is a Workgroup Manager. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: SaveWorkgroupMemberResponse\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True return self.teams_save_workgroup_member_v1_with_http_info(save_workgroup_member_request, **kwargs)
-662,188,750,291,340,700
Save the membership status of a Workgroup member. # noqa: E501 Operation to add or update a Workgroup Member. To add or update, specify Customer Record ID, Workgroup ID, and if Workgroup Manager. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_save_workgroup_member_v1(save_workgroup_member_request, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param SaveWorkgroupMemberRequest save_workgroup_member_request: The request object to add or update a Workgroup Member. CustomerRecordId specifies the Customer to add or update. WorkgroupId specifies the Workgroup to update. CustomerIsWorkgroupManager specifies whether the Customer is a Workgroup Manager. (required) :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: SaveWorkgroupMemberResponse If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_save_workgroup_member_v1
greenpau/pycherwell
python
def teams_save_workgroup_member_v1(self, save_workgroup_member_request, **kwargs): 'Save the membership status of a Workgroup member. # noqa: E501\n\n Operation to add or update a Workgroup Member. To add or update, specify Customer Record ID, Workgroup ID, and if Workgroup Manager. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_workgroup_member_v1(save_workgroup_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveWorkgroupMemberRequest save_workgroup_member_request: The request object to add or update a Workgroup Member. CustomerRecordId specifies the Customer to add or update. WorkgroupId specifies the Workgroup to update. CustomerIsWorkgroupManager specifies whether the Customer is a Workgroup Manager. (required)\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: SaveWorkgroupMemberResponse\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True return self.teams_save_workgroup_member_v1_with_http_info(save_workgroup_member_request, **kwargs)
def teams_save_workgroup_member_v1_with_http_info(self, save_workgroup_member_request, **kwargs): 'Save the membership status of a Workgroup member. # noqa: E501\n\n Operation to add or update a Workgroup Member. To add or update, specify Customer Record ID, Workgroup ID, and if Workgroup Manager. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_workgroup_member_v1_with_http_info(save_workgroup_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveWorkgroupMemberRequest save_workgroup_member_request: The request object to add or update a Workgroup Member. CustomerRecordId specifies the Customer to add or update. WorkgroupId specifies the Workgroup to update. CustomerIsWorkgroupManager specifies whether the Customer is a Workgroup Manager. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(SaveWorkgroupMemberResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n ' local_var_params = locals() all_params = ['save_workgroup_member_request'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_save_workgroup_member_v1" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('save_workgroup_member_request' not in local_var_params) or (local_var_params['save_workgroup_member_request'] is None))): raise ApiValueError('Missing the required parameter `save_workgroup_member_request` when calling `teams_save_workgroup_member_v1`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('save_workgroup_member_request' in local_var_params): body_params = local_var_params['save_workgroup_member_request'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V1/saveworkgroupmember', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SaveWorkgroupMemberResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), 
collection_formats=collection_formats)
-1,649,651,297,112,204,000
Save the membership status of a Workgroup member. # noqa: E501 Operation to add or update a Workgroup Member. To add or update, specify Customer Record ID, Workgroup ID, and if Workgroup Manager. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.teams_save_workgroup_member_v1_with_http_info(save_workgroup_member_request, async_req=True) >>> result = thread.get() :param async_req bool: execute request asynchronously :param SaveWorkgroupMemberRequest save_workgroup_member_request: The request object to add or update a Workgroup Member. CustomerRecordId specifies the Customer to add or update. WorkgroupId specifies the Workgroup to update. CustomerIsWorkgroupManager specifies whether the Customer is a Workgroup Manager. (required) :param _return_http_data_only: response data without head status code and headers :param _preload_content: if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. :return: tuple(SaveWorkgroupMemberResponse, status_code(int), headers(HTTPHeaderDict)) If the method is called asynchronously, returns the request thread.
pycherwell/api/teams_api.py
teams_save_workgroup_member_v1_with_http_info
greenpau/pycherwell
python
def teams_save_workgroup_member_v1_with_http_info(self, save_workgroup_member_request, **kwargs): 'Save the membership status of a Workgroup member. # noqa: E501\n\n Operation to add or update a Workgroup Member. To add or update, specify Customer Record ID, Workgroup ID, and if Workgroup Manager. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.teams_save_workgroup_member_v1_with_http_info(save_workgroup_member_request, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool: execute request asynchronously\n :param SaveWorkgroupMemberRequest save_workgroup_member_request: The request object to add or update a Workgroup Member. CustomerRecordId specifies the Customer to add or update. WorkgroupId specifies the Workgroup to update. CustomerIsWorkgroupManager specifies whether the Customer is a Workgroup Manager. (required)\n :param _return_http_data_only: response data without head status code\n and headers\n :param _preload_content: if False, the urllib3.HTTPResponse object will\n be returned without reading/decoding response\n data. Default is True.\n :param _request_timeout: timeout setting for this request. If one\n number provided, it will be total request\n timeout. 
It can also be a pair (tuple) of\n (connection, read) timeouts.\n :return: tuple(SaveWorkgroupMemberResponse, status_code(int), headers(HTTPHeaderDict))\n If the method is called asynchronously,\n returns the request thread.\n ' local_var_params = locals() all_params = ['save_workgroup_member_request'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') for (key, val) in six.iteritems(local_var_params['kwargs']): if (key not in all_params): raise ApiTypeError(("Got an unexpected keyword argument '%s' to method teams_save_workgroup_member_v1" % key)) local_var_params[key] = val del local_var_params['kwargs'] if (self.api_client.client_side_validation and (('save_workgroup_member_request' not in local_var_params) or (local_var_params['save_workgroup_member_request'] is None))): raise ApiValueError('Missing the required parameter `save_workgroup_member_request` when calling `teams_save_workgroup_member_v1`') collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if ('save_workgroup_member_request' in local_var_params): body_params = local_var_params['save_workgroup_member_request'] header_params['Accept'] = self.api_client.select_header_accept(['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json']) auth_settings = [] return self.api_client.call_api('/api/V1/saveworkgroupmember', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SaveWorkgroupMemberResponse', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), 
collection_formats=collection_formats)
def start_node(self, i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None): 'Start a bitcoind and return RPC connection to it' if (extra_args is None): extra_args = [] if (binary is None): binary = os.getenv('BITCOIND', 'bitcoind') node = TestNode(i, dirname, extra_args, rpchost, timewait, binary, stderr, self.mocktime, coverage_dir=self.options.coveragedir) node.start() node.wait_for_rpc_connection() if (self.options.coveragedir is not None): coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) return node
6,190,739,323,630,635,000
Start a bitcoind and return RPC connection to it
test/functional/test_framework/test_framework.py
start_node
Ilikenumber0/CoinKhongCoGiaTri
python
def start_node(self, i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None): if (extra_args is None): extra_args = [] if (binary is None): binary = os.getenv('BITCOIND', 'bitcoind') node = TestNode(i, dirname, extra_args, rpchost, timewait, binary, stderr, self.mocktime, coverage_dir=self.options.coveragedir) node.start() node.wait_for_rpc_connection() if (self.options.coveragedir is not None): coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) return node
def start_nodes(self, num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): 'Start multiple bitcoinds, return RPC connections to them' if (extra_args is None): extra_args = ([[]] * num_nodes) if (binary is None): binary = ([None] * num_nodes) assert_equal(len(extra_args), num_nodes) assert_equal(len(binary), num_nodes) nodes = [] try: for i in range(num_nodes): nodes.append(TestNode(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir)) nodes[i].start() for node in nodes: node.wait_for_rpc_connection() except: self.stop_nodes() raise if (self.options.coveragedir is not None): for node in nodes: coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) return nodes
-8,064,546,972,420,278,000
Start multiple bitcoinds, return RPC connections to them
test/functional/test_framework/test_framework.py
start_nodes
Ilikenumber0/CoinKhongCoGiaTri
python
def start_nodes(self, num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): if (extra_args is None): extra_args = ([[]] * num_nodes) if (binary is None): binary = ([None] * num_nodes) assert_equal(len(extra_args), num_nodes) assert_equal(len(binary), num_nodes) nodes = [] try: for i in range(num_nodes): nodes.append(TestNode(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir)) nodes[i].start() for node in nodes: node.wait_for_rpc_connection() except: self.stop_nodes() raise if (self.options.coveragedir is not None): for node in nodes: coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) return nodes
def stop_node(self, i): 'Stop a bitcoind test node' self.nodes[i].stop_node() while (not self.nodes[i].is_node_stopped()): time.sleep(0.1)
220,452,968,083,122,850
Stop a bitcoind test node
test/functional/test_framework/test_framework.py
stop_node
Ilikenumber0/CoinKhongCoGiaTri
python
def stop_node(self, i): self.nodes[i].stop_node() while (not self.nodes[i].is_node_stopped()): time.sleep(0.1)
def stop_nodes(self): 'Stop multiple bitcoind test nodes' for node in self.nodes: node.stop_node() for node in self.nodes: while (not node.is_node_stopped()): time.sleep(0.1)
1,708,372,296,490,657,800
Stop multiple bitcoind test nodes
test/functional/test_framework/test_framework.py
stop_nodes
Ilikenumber0/CoinKhongCoGiaTri
python
def stop_nodes(self): for node in self.nodes: node.stop_node() for node in self.nodes: while (not node.is_node_stopped()): time.sleep(0.1)
def split_network(self): '\n Split the network of four nodes into nodes 0/1 and 2/3.\n ' disconnect_nodes(self.nodes[1], 2) disconnect_nodes(self.nodes[2], 1) self.sync_all([self.nodes[:2], self.nodes[2:]])
-8,998,982,825,230,729,000
Split the network of four nodes into nodes 0/1 and 2/3.
test/functional/test_framework/test_framework.py
split_network
Ilikenumber0/CoinKhongCoGiaTri
python
def split_network(self): '\n \n ' disconnect_nodes(self.nodes[1], 2) disconnect_nodes(self.nodes[2], 1) self.sync_all([self.nodes[:2], self.nodes[2:]])
def join_network(self): '\n Join the (previously split) network halves together.\n ' connect_nodes_bi(self.nodes, 1, 2) self.sync_all()
3,019,492,878,792,302,600
Join the (previously split) network halves together.
test/functional/test_framework/test_framework.py
join_network
Ilikenumber0/CoinKhongCoGiaTri
python
def join_network(self): '\n \n ' connect_nodes_bi(self.nodes, 1, 2) self.sync_all()
def enable_mocktime(self): 'Enable mocktime for the script.\n\n mocktime may be needed for scripts that use the cached version of the\n blockchain. If the cached version of the blockchain is used without\n mocktime then the mempools will not sync due to IBD.\n\n For backwared compatibility of the python scripts with previous\n versions of the cache, this helper function sets mocktime to Jan 1,\n 2014 + (201 * 10 * 60)' self.mocktime = (1388534400 + ((201 * 10) * 60))
2,907,712,534,402,688,500
Enable mocktime for the script. mocktime may be needed for scripts that use the cached version of the blockchain. If the cached version of the blockchain is used without mocktime then the mempools will not sync due to IBD. For backwared compatibility of the python scripts with previous versions of the cache, this helper function sets mocktime to Jan 1, 2014 + (201 * 10 * 60)
test/functional/test_framework/test_framework.py
enable_mocktime
Ilikenumber0/CoinKhongCoGiaTri
python
def enable_mocktime(self): 'Enable mocktime for the script.\n\n mocktime may be needed for scripts that use the cached version of the\n blockchain. If the cached version of the blockchain is used without\n mocktime then the mempools will not sync due to IBD.\n\n For backwared compatibility of the python scripts with previous\n versions of the cache, this helper function sets mocktime to Jan 1,\n 2014 + (201 * 10 * 60)' self.mocktime = (1388534400 + ((201 * 10) * 60))
def _initialize_chain(self, test_dir, num_nodes, cachedir): 'Initialize a pre-mined blockchain for use by the test.\n\n Create a cache of a 200-block-long chain (with wallet) for MAX_NODES\n Afterward, create num_nodes copies from the cache.' assert (num_nodes <= MAX_NODES) create_cache = False for i in range(MAX_NODES): if (not os.path.isdir(os.path.join(cachedir, ('node' + str(i))))): create_cache = True break if create_cache: self.log.debug('Creating data directories from cached datadir') for i in range(MAX_NODES): if os.path.isdir(os.path.join(cachedir, ('node' + str(i)))): shutil.rmtree(os.path.join(cachedir, ('node' + str(i)))) for i in range(MAX_NODES): datadir = initialize_datadir(cachedir, i) args = [os.getenv('BITCOIND', 'bitcoind'), '-server', '-keypool=1', ('-datadir=' + datadir), '-discover=0'] if (i > 0): args.append(('-connect=127.0.0.1:' + str(p2p_port(0)))) self.nodes.append(TestNode(i, cachedir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None, mocktime=self.mocktime, coverage_dir=None)) self.nodes[i].args = args self.nodes[i].start() for node in self.nodes: node.wait_for_rpc_connection() self.enable_mocktime() block_time = (self.mocktime - ((201 * 10) * 60)) for i in range(2): for peer in range(4): for j in range(25): set_node_times(self.nodes, block_time) self.nodes[peer].generate(1) block_time += (10 * 60) sync_blocks(self.nodes) self.stop_nodes() self.nodes = [] self.disable_mocktime() for i in range(MAX_NODES): os.remove(log_filename(cachedir, i, 'debug.log')) os.remove(log_filename(cachedir, i, 'db.log')) os.remove(log_filename(cachedir, i, 'peers.dat')) os.remove(log_filename(cachedir, i, 'fee_estimates.dat')) for i in range(num_nodes): from_dir = os.path.join(cachedir, ('node' + str(i))) to_dir = os.path.join(test_dir, ('node' + str(i))) shutil.copytree(from_dir, to_dir) initialize_datadir(test_dir, i)
-5,239,564,696,218,285,000
Initialize a pre-mined blockchain for use by the test. Create a cache of a 200-block-long chain (with wallet) for MAX_NODES Afterward, create num_nodes copies from the cache.
test/functional/test_framework/test_framework.py
_initialize_chain
Ilikenumber0/CoinKhongCoGiaTri
python
def _initialize_chain(self, test_dir, num_nodes, cachedir): 'Initialize a pre-mined blockchain for use by the test.\n\n Create a cache of a 200-block-long chain (with wallet) for MAX_NODES\n Afterward, create num_nodes copies from the cache.' assert (num_nodes <= MAX_NODES) create_cache = False for i in range(MAX_NODES): if (not os.path.isdir(os.path.join(cachedir, ('node' + str(i))))): create_cache = True break if create_cache: self.log.debug('Creating data directories from cached datadir') for i in range(MAX_NODES): if os.path.isdir(os.path.join(cachedir, ('node' + str(i)))): shutil.rmtree(os.path.join(cachedir, ('node' + str(i)))) for i in range(MAX_NODES): datadir = initialize_datadir(cachedir, i) args = [os.getenv('BITCOIND', 'bitcoind'), '-server', '-keypool=1', ('-datadir=' + datadir), '-discover=0'] if (i > 0): args.append(('-connect=127.0.0.1:' + str(p2p_port(0)))) self.nodes.append(TestNode(i, cachedir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None, mocktime=self.mocktime, coverage_dir=None)) self.nodes[i].args = args self.nodes[i].start() for node in self.nodes: node.wait_for_rpc_connection() self.enable_mocktime() block_time = (self.mocktime - ((201 * 10) * 60)) for i in range(2): for peer in range(4): for j in range(25): set_node_times(self.nodes, block_time) self.nodes[peer].generate(1) block_time += (10 * 60) sync_blocks(self.nodes) self.stop_nodes() self.nodes = [] self.disable_mocktime() for i in range(MAX_NODES): os.remove(log_filename(cachedir, i, 'debug.log')) os.remove(log_filename(cachedir, i, 'db.log')) os.remove(log_filename(cachedir, i, 'peers.dat')) os.remove(log_filename(cachedir, i, 'fee_estimates.dat')) for i in range(num_nodes): from_dir = os.path.join(cachedir, ('node' + str(i))) to_dir = os.path.join(test_dir, ('node' + str(i))) shutil.copytree(from_dir, to_dir) initialize_datadir(test_dir, i)
def _initialize_chain_clean(self, test_dir, num_nodes): 'Initialize empty blockchain for use by the test.\n\n Create an empty blockchain and num_nodes wallets.\n Useful if a test case wants complete control over initialization.' for i in range(num_nodes): initialize_datadir(test_dir, i)
2,168,712,097,468,151,000
Initialize empty blockchain for use by the test. Create an empty blockchain and num_nodes wallets. Useful if a test case wants complete control over initialization.
test/functional/test_framework/test_framework.py
_initialize_chain_clean
Ilikenumber0/CoinKhongCoGiaTri
python
def _initialize_chain_clean(self, test_dir, num_nodes): 'Initialize empty blockchain for use by the test.\n\n Create an empty blockchain and num_nodes wallets.\n Useful if a test case wants complete control over initialization.' for i in range(num_nodes): initialize_datadir(test_dir, i)
def main(): 'Main program execution.' (n, h1, h2, h3) = generate_ammonia_sites() nList = [[1, 2, 3], [0], [0], [0]] return ([n, h1, h2, h3], nList)
1,557,158,784,874,016,500
Main program execution.
kappa/lattice/ammonia.py
main
ajkerr0/kappa
python
def main(): (n, h1, h2, h3) = generate_ammonia_sites() nList = [[1, 2, 3], [0], [0], [0]] return ([n, h1, h2, h3], nList)
def generate_ammonia_sites(): 'Generate the locations for the atoms in the ammonia molecule' (x, y) = (np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0])) a = 1.4 n = np.array([0.0, 0.0, 0.0]) h1 = (n + (a * y)) h2 = ((n - ((a * y) / 2.0)) + ((a * x) * (np.sqrt(3) / 2))) h3 = (h2 - ((a * x) * np.sqrt(3))) return (n, h1, h2, h3)
1,256,717,689,122,766,000
Generate the locations for the atoms in the ammonia molecule
kappa/lattice/ammonia.py
generate_ammonia_sites
ajkerr0/kappa
python
def generate_ammonia_sites(): (x, y) = (np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0])) a = 1.4 n = np.array([0.0, 0.0, 0.0]) h1 = (n + (a * y)) h2 = ((n - ((a * y) / 2.0)) + ((a * x) * (np.sqrt(3) / 2))) h3 = (h2 - ((a * x) * np.sqrt(3))) return (n, h1, h2, h3)
def __init__(self, pubs_file_path='pubs.txt', pubs_info_file_path='pubs_info.txt'): 'Takes an EndNote library exported a TXT file (`pubs_file_path`), and an optional TAB-delimited info file path with DOI identifiers (`pubs_info_file_path`), and generates some Markdown formatted output.\n\n Here is an info line from the EndNote:\n\n Winterberg, K. M., and Reznikoff, W. S. (2007). "Screening transposon mutant libraries using full-genome oligonucleotide microarrays." Methods Enzymol, 421, 110-25.\n\n Absolute matching to this format is required.\n\n Expected headers in the TAB-delimited pubs info file are \'doi\', \'highlights\', and \'featured_image\'.\n\n - doi: The DOI of the pub matching to a pubs file path entry.\n - highlights: Brief bullet points about the work. Each pont must be separated from the rest with a \';\' character. HTML tags are OK.\n - featured_image: A URL to an image.\n\n If things are not working, feel free to write to meren at uchicago.edu\n ' self.info = {} self.pubs_dict = {} self.journals_list = [] self.authors_list = [] self.recent_authors_list = [] self.author_links = {} self.pubs_file_path = pubs_file_path self.pubs_info_file_path = pubs_info_file_path
-8,329,738,461,641,885,000
Takes an EndNote library exported a TXT file (`pubs_file_path`), and an optional TAB-delimited info file path with DOI identifiers (`pubs_info_file_path`), and generates some Markdown formatted output. Here is an info line from the EndNote: Winterberg, K. M., and Reznikoff, W. S. (2007). "Screening transposon mutant libraries using full-genome oligonucleotide microarrays." Methods Enzymol, 421, 110-25. Absolute matching to this format is required. Expected headers in the TAB-delimited pubs info file are 'doi', 'highlights', and 'featured_image'. - doi: The DOI of the pub matching to a pubs file path entry. - highlights: Brief bullet points about the work. Each pont must be separated from the rest with a ';' character. HTML tags are OK. - featured_image: A URL to an image. If things are not working, feel free to write to meren at uchicago.edu
pubs.py
__init__
Ibrahimmohamed33/web
python
def __init__(self, pubs_file_path='pubs.txt', pubs_info_file_path='pubs_info.txt'): 'Takes an EndNote library exported a TXT file (`pubs_file_path`), and an optional TAB-delimited info file path with DOI identifiers (`pubs_info_file_path`), and generates some Markdown formatted output.\n\n Here is an info line from the EndNote:\n\n Winterberg, K. M., and Reznikoff, W. S. (2007). "Screening transposon mutant libraries using full-genome oligonucleotide microarrays." Methods Enzymol, 421, 110-25.\n\n Absolute matching to this format is required.\n\n Expected headers in the TAB-delimited pubs info file are \'doi\', \'highlights\', and \'featured_image\'.\n\n - doi: The DOI of the pub matching to a pubs file path entry.\n - highlights: Brief bullet points about the work. Each pont must be separated from the rest with a \';\' character. HTML tags are OK.\n - featured_image: A URL to an image.\n\n If things are not working, feel free to write to meren at uchicago.edu\n ' self.info = {} self.pubs_dict = {} self.journals_list = [] self.authors_list = [] self.recent_authors_list = [] self.author_links = {} self.pubs_file_path = pubs_file_path self.pubs_info_file_path = pubs_info_file_path
def get_markdown_text_for_pub(self, pub): "Gets a dictionary `pub`, returns a markdown formatted text.\n\n An example pub:\n\n {'authors': 'McLellan, S. L., and Eren, A. M.',\n 'doi': '10.1016/j.tim.2014.08.002',\n 'issue': '22(12), 697-706',\n 'title': 'Discovering new indicators of fecal pollution.',\n 'journal': 'Trends Microbiol',\n 'year': 2014}\n " pub_md = [] A = (lambda s: pub_md.append(s)) A('<div class="pub">') A(('<div class=\'altmetric-embed\' data-badge-type=\'donut\' data-doi="%s"></div>' % pub['doi'])) A(('<div class="__dimensions_badge_embed__" data-doi="%s" data-hide-zero-citations="true" data-legend="hover-bottom" data-style="small_circle"></div>' % pub['doi'])) if pub['doi']: A((' <h3><a href="%s" target="_new">%s</a></h3>' % ((' https://doi.org/%s' % pub['doi']), pub['title']))) else: A((' <h3><a href="http://scholar.google.com/scholar?hl=en&q=%s" target="_new">%s</a></h3>' % (('http://scholar.google.com/scholar?hl=en&q=%s' % pub['title'].replace(' ', '+')), pub['title']))) A((' <span class="pub-authors">%s</span>' % self.get_author_highlights(pub))) if (pub['co_first_authors'] and (not pub['co_senior_authors'])): A(' <span class="pub-co-first-authors"><sup>☯</sup>Co-first authors</span>') elif (pub['co_first_authors'] and pub['co_senior_authors']): A(' <span class="pub-co-first-authors"><sup>☯</sup>Co-first authors; <sup>‡</sup>Co-senior authors</span>') elif (pub['co_senior_authors'] and (not pub['co_first_authors'])): A(' <span class="pub-co-first-authors"><sup>‡</sup>Co-senior authors</span>') if (pub['doi'] in self.info): info = self.info[pub['doi']] A((' <div class="%s">' % ('pub-info' if info['featured_image'] else 'pub-info-no-image'))) if info['featured_image']: A(' <div class="pub-featured-image">') A((' <a href="%s"><img src="%s" style="max-width: 100px; max-height: 80px; width: auto; border: none; height: auto; margin: 0 auto; display: block; transform: translateY(15%%);"/></a>' % (info['featured_image'], info['featured_image']))) 
A(' </div>') highlights = (info['highlights'].split(';') if info['highlights'] else None) if highlights: A((' <div class="%s">' % ('pub-highlights' if info['featured_image'] else 'pub-highlights-no-image'))) A((' %s' % '<br>'.join([('<span style="display: inline-block; padding-bottom: 5px;">- %s</span>' % h) for h in highlights]))) A(' </div>') A(' </div>') A((' <span class="pub-journal"><b>%s</b>, %s.</span>' % (pub['journal'], pub['issue']))) A('</div>\n') return '\n'.join(pub_md)
-6,421,781,647,857,561,000
Gets a dictionary `pub`, returns a markdown formatted text. An example pub: {'authors': 'McLellan, S. L., and Eren, A. M.', 'doi': '10.1016/j.tim.2014.08.002', 'issue': '22(12), 697-706', 'title': 'Discovering new indicators of fecal pollution.', 'journal': 'Trends Microbiol', 'year': 2014}
pubs.py
get_markdown_text_for_pub
Ibrahimmohamed33/web
python
def get_markdown_text_for_pub(self, pub): "Gets a dictionary `pub`, returns a markdown formatted text.\n\n An example pub:\n\n {'authors': 'McLellan, S. L., and Eren, A. M.',\n 'doi': '10.1016/j.tim.2014.08.002',\n 'issue': '22(12), 697-706',\n 'title': 'Discovering new indicators of fecal pollution.',\n 'journal': 'Trends Microbiol',\n 'year': 2014}\n " pub_md = [] A = (lambda s: pub_md.append(s)) A('<div class="pub">') A(('<div class=\'altmetric-embed\' data-badge-type=\'donut\' data-doi="%s"></div>' % pub['doi'])) A(('<div class="__dimensions_badge_embed__" data-doi="%s" data-hide-zero-citations="true" data-legend="hover-bottom" data-style="small_circle"></div>' % pub['doi'])) if pub['doi']: A((' <h3><a href="%s" target="_new">%s</a></h3>' % ((' https://doi.org/%s' % pub['doi']), pub['title']))) else: A((' <h3><a href="http://scholar.google.com/scholar?hl=en&q=%s" target="_new">%s</a></h3>' % (('http://scholar.google.com/scholar?hl=en&q=%s' % pub['title'].replace(' ', '+')), pub['title']))) A((' <span class="pub-authors">%s</span>' % self.get_author_highlights(pub))) if (pub['co_first_authors'] and (not pub['co_senior_authors'])): A(' <span class="pub-co-first-authors"><sup>☯</sup>Co-first authors</span>') elif (pub['co_first_authors'] and pub['co_senior_authors']): A(' <span class="pub-co-first-authors"><sup>☯</sup>Co-first authors; <sup>‡</sup>Co-senior authors</span>') elif (pub['co_senior_authors'] and (not pub['co_first_authors'])): A(' <span class="pub-co-first-authors"><sup>‡</sup>Co-senior authors</span>') if (pub['doi'] in self.info): info = self.info[pub['doi']] A((' <div class="%s">' % ('pub-info' if info['featured_image'] else 'pub-info-no-image'))) if info['featured_image']: A(' <div class="pub-featured-image">') A((' <a href="%s"><img src="%s" style="max-width: 100px; max-height: 80px; width: auto; border: none; height: auto; margin: 0 auto; display: block; transform: translateY(15%%);"/></a>' % (info['featured_image'], info['featured_image']))) 
A(' </div>') highlights = (info['highlights'].split(';') if info['highlights'] else None) if highlights: A((' <div class="%s">' % ('pub-highlights' if info['featured_image'] else 'pub-highlights-no-image'))) A((' %s' % '<br>'.join([('<span style="display: inline-block; padding-bottom: 5px;">- %s</span>' % h) for h in highlights]))) A(' </div>') A(' </div>') A((' <span class="pub-journal"><b>%s</b>, %s.</span>' % (pub['journal'], pub['issue']))) A('</div>\n') return '\n'.join(pub_md)
@classmethod def register_class_to_dict(cls, clazz, converter, serpent_too=True): 'Registers a custom function that returns a dict representation of objects of the given class.\n The function is called with a single parameter; the object to be converted to a dict.' cls.__custom_class_to_dict_registry[clazz] = converter if serpent_too: try: def serpent_converter(obj, serializer, stream, level): d = converter(obj) serializer.ser_builtins_dict(d, stream, level) serpent.register_class(clazz, serpent_converter) except errors.ProtocolError: pass
1,846,850,711,715,845,400
Registers a custom function that returns a dict representation of objects of the given class. The function is called with a single parameter; the object to be converted to a dict.
Pyro5/serializers.py
register_class_to_dict
gst/Pyro5
python
@classmethod def register_class_to_dict(cls, clazz, converter, serpent_too=True): 'Registers a custom function that returns a dict representation of objects of the given class.\n The function is called with a single parameter; the object to be converted to a dict.' cls.__custom_class_to_dict_registry[clazz] = converter if serpent_too: try: def serpent_converter(obj, serializer, stream, level): d = converter(obj) serializer.ser_builtins_dict(d, stream, level) serpent.register_class(clazz, serpent_converter) except errors.ProtocolError: pass
@classmethod def unregister_class_to_dict(cls, clazz): 'Removes the to-dict conversion function registered for the given class. Objects of the class\n will be serialized by the default mechanism again.' if (clazz in cls.__custom_class_to_dict_registry): del cls.__custom_class_to_dict_registry[clazz] try: serpent.unregister_class(clazz) except errors.ProtocolError: pass
5,616,744,704,108,632,000
Removes the to-dict conversion function registered for the given class. Objects of the class will be serialized by the default mechanism again.
Pyro5/serializers.py
unregister_class_to_dict
gst/Pyro5
python
@classmethod def unregister_class_to_dict(cls, clazz): 'Removes the to-dict conversion function registered for the given class. Objects of the class\n will be serialized by the default mechanism again.' if (clazz in cls.__custom_class_to_dict_registry): del cls.__custom_class_to_dict_registry[clazz] try: serpent.unregister_class(clazz) except errors.ProtocolError: pass
@classmethod def register_dict_to_class(cls, classname, converter): '\n Registers a custom converter function that creates objects from a dict with the given classname tag in it.\n The function is called with two parameters: the classname and the dictionary to convert to an instance of the class.\n ' cls.__custom_dict_to_class_registry[classname] = converter
-4,186,548,957,584,928,000
Registers a custom converter function that creates objects from a dict with the given classname tag in it. The function is called with two parameters: the classname and the dictionary to convert to an instance of the class.
Pyro5/serializers.py
register_dict_to_class
gst/Pyro5
python
@classmethod def register_dict_to_class(cls, classname, converter): '\n Registers a custom converter function that creates objects from a dict with the given classname tag in it.\n The function is called with two parameters: the classname and the dictionary to convert to an instance of the class.\n ' cls.__custom_dict_to_class_registry[classname] = converter
@classmethod def unregister_dict_to_class(cls, classname): '\n Removes the converter registered for the given classname. Dicts with that classname tag\n will be deserialized by the default mechanism again.\n ' if (classname in cls.__custom_dict_to_class_registry): del cls.__custom_dict_to_class_registry[classname]
8,739,360,008,819,532,000
Removes the converter registered for the given classname. Dicts with that classname tag will be deserialized by the default mechanism again.
Pyro5/serializers.py
unregister_dict_to_class
gst/Pyro5
python
@classmethod def unregister_dict_to_class(cls, classname): '\n Removes the converter registered for the given classname. Dicts with that classname tag\n will be deserialized by the default mechanism again.\n ' if (classname in cls.__custom_dict_to_class_registry): del cls.__custom_dict_to_class_registry[classname]
@classmethod def class_to_dict(cls, obj): '\n Convert a non-serializable object to a dict. Partly borrowed from serpent.\n ' for clazz in cls.__custom_class_to_dict_registry: if isinstance(obj, clazz): return cls.__custom_class_to_dict_registry[clazz](obj) if (type(obj) in (set, dict, tuple, list)): raise ValueError((("can't serialize type " + str(obj.__class__)) + ' into a dict')) if hasattr(obj, '_pyroDaemon'): obj._pyroDaemon = None if isinstance(obj, BaseException): return {'__class__': ((obj.__class__.__module__ + '.') + obj.__class__.__name__), '__exception__': True, 'args': obj.args, 'attributes': vars(obj)} try: value = obj.__getstate__() except AttributeError: pass else: if isinstance(value, dict): return value try: value = dict(vars(obj)) value['__class__'] = ((obj.__class__.__module__ + '.') + obj.__class__.__name__) return value except TypeError: if hasattr(obj, '__slots__'): value = {} for slot in obj.__slots__: value[slot] = getattr(obj, slot) value['__class__'] = ((obj.__class__.__module__ + '.') + obj.__class__.__name__) return value else: raise errors.SerializeError((((("don't know how to serialize class " + str(obj.__class__)) + ' using serializer ') + str(cls.__name__)) + '. Give it vars() or an appropriate __getstate__'))
4,357,961,985,474,876,000
Convert a non-serializable object to a dict. Partly borrowed from serpent.
Pyro5/serializers.py
class_to_dict
gst/Pyro5
python
@classmethod def class_to_dict(cls, obj): '\n \n ' for clazz in cls.__custom_class_to_dict_registry: if isinstance(obj, clazz): return cls.__custom_class_to_dict_registry[clazz](obj) if (type(obj) in (set, dict, tuple, list)): raise ValueError((("can't serialize type " + str(obj.__class__)) + ' into a dict')) if hasattr(obj, '_pyroDaemon'): obj._pyroDaemon = None if isinstance(obj, BaseException): return {'__class__': ((obj.__class__.__module__ + '.') + obj.__class__.__name__), '__exception__': True, 'args': obj.args, 'attributes': vars(obj)} try: value = obj.__getstate__() except AttributeError: pass else: if isinstance(value, dict): return value try: value = dict(vars(obj)) value['__class__'] = ((obj.__class__.__module__ + '.') + obj.__class__.__name__) return value except TypeError: if hasattr(obj, '__slots__'): value = {} for slot in obj.__slots__: value[slot] = getattr(obj, slot) value['__class__'] = ((obj.__class__.__module__ + '.') + obj.__class__.__name__) return value else: raise errors.SerializeError((((("don't know how to serialize class " + str(obj.__class__)) + ' using serializer ') + str(cls.__name__)) + '. Give it vars() or an appropriate __getstate__'))
@classmethod def dict_to_class(cls, data): '\n Recreate an object out of a dict containing the class name and the attributes.\n Only a fixed set of classes are recognized.\n ' from . import core, client, server classname = data.get('__class__', '<unknown>') if isinstance(classname, bytes): classname = classname.decode('utf-8') if (classname in cls.__custom_dict_to_class_registry): converter = cls.__custom_dict_to_class_registry[classname] return converter(classname, data) if ('__' in classname): raise errors.SecurityError(('refused to deserialize types with double underscores in their name: ' + classname)) if (classname == 'Pyro5.core.URI'): uri = core.URI.__new__(core.URI) uri.__setstate__(data['state']) return uri elif (classname == 'Pyro5.client.Proxy'): proxy = client.Proxy.__new__(client.Proxy) proxy.__setstate__(data['state']) return proxy elif (classname == 'Pyro5.server.Daemon'): daemon = server.Daemon.__new__(server.Daemon) daemon.__setstate__(data['state']) return daemon elif classname.startswith('Pyro5.util.'): if (classname == 'Pyro5.util.SerpentSerializer'): return SerpentSerializer() elif (classname == 'Pyro5.util.MarshalSerializer'): return MarshalSerializer() elif (classname == 'Pyro5.util.JsonSerializer'): return JsonSerializer() elif (classname == 'Pyro5.util.MsgpackSerializer'): return MsgpackSerializer() elif classname.startswith('Pyro5.errors.'): errortype = getattr(errors, classname.split('.', 2)[2]) if issubclass(errortype, errors.PyroError): return SerializerBase.make_exception(errortype, data) elif (classname == 'struct.error'): return SerializerBase.make_exception(struct.error, data) elif (classname == 'Pyro5.core._ExceptionWrapper'): ex = data['exception'] if (isinstance(ex, dict) and ('__class__' in ex)): ex = SerializerBase.dict_to_class(ex) return core._ExceptionWrapper(ex) elif data.get('__exception__', False): if (classname in all_exceptions): return SerializerBase.make_exception(all_exceptions[classname], data) (namespace, 
short_classname) = classname.split('.', 1) if (namespace in ('builtins', 'exceptions')): exceptiontype = getattr(builtins, short_classname) if issubclass(exceptiontype, BaseException): return SerializerBase.make_exception(exceptiontype, data) elif ((namespace == 'sqlite3') and short_classname.endswith('Error')): import sqlite3 exceptiontype = getattr(sqlite3, short_classname) if issubclass(exceptiontype, BaseException): return SerializerBase.make_exception(exceptiontype, data) log.warning(('unsupported serialized class: ' + classname)) raise errors.SerializeError(('unsupported serialized class: ' + classname))
-3,179,829,849,173,357,600
Recreate an object out of a dict containing the class name and the attributes. Only a fixed set of classes are recognized.
Pyro5/serializers.py
dict_to_class
gst/Pyro5
python
@classmethod def dict_to_class(cls, data): '\n Recreate an object out of a dict containing the class name and the attributes.\n Only a fixed set of classes are recognized.\n ' from . import core, client, server classname = data.get('__class__', '<unknown>') if isinstance(classname, bytes): classname = classname.decode('utf-8') if (classname in cls.__custom_dict_to_class_registry): converter = cls.__custom_dict_to_class_registry[classname] return converter(classname, data) if ('__' in classname): raise errors.SecurityError(('refused to deserialize types with double underscores in their name: ' + classname)) if (classname == 'Pyro5.core.URI'): uri = core.URI.__new__(core.URI) uri.__setstate__(data['state']) return uri elif (classname == 'Pyro5.client.Proxy'): proxy = client.Proxy.__new__(client.Proxy) proxy.__setstate__(data['state']) return proxy elif (classname == 'Pyro5.server.Daemon'): daemon = server.Daemon.__new__(server.Daemon) daemon.__setstate__(data['state']) return daemon elif classname.startswith('Pyro5.util.'): if (classname == 'Pyro5.util.SerpentSerializer'): return SerpentSerializer() elif (classname == 'Pyro5.util.MarshalSerializer'): return MarshalSerializer() elif (classname == 'Pyro5.util.JsonSerializer'): return JsonSerializer() elif (classname == 'Pyro5.util.MsgpackSerializer'): return MsgpackSerializer() elif classname.startswith('Pyro5.errors.'): errortype = getattr(errors, classname.split('.', 2)[2]) if issubclass(errortype, errors.PyroError): return SerializerBase.make_exception(errortype, data) elif (classname == 'struct.error'): return SerializerBase.make_exception(struct.error, data) elif (classname == 'Pyro5.core._ExceptionWrapper'): ex = data['exception'] if (isinstance(ex, dict) and ('__class__' in ex)): ex = SerializerBase.dict_to_class(ex) return core._ExceptionWrapper(ex) elif data.get('__exception__', False): if (classname in all_exceptions): return SerializerBase.make_exception(all_exceptions[classname], data) (namespace, 
short_classname) = classname.split('.', 1) if (namespace in ('builtins', 'exceptions')): exceptiontype = getattr(builtins, short_classname) if issubclass(exceptiontype, BaseException): return SerializerBase.make_exception(exceptiontype, data) elif ((namespace == 'sqlite3') and short_classname.endswith('Error')): import sqlite3 exceptiontype = getattr(sqlite3, short_classname) if issubclass(exceptiontype, BaseException): return SerializerBase.make_exception(exceptiontype, data) log.warning(('unsupported serialized class: ' + classname)) raise errors.SerializeError(('unsupported serialized class: ' + classname))
def __eq__(self, other): 'this equality method is only to support the unit tests of this class' return (isinstance(other, SerializerBase) and (vars(self) == vars(other)))
-3,430,843,197,592,776,700
this equality method is only to support the unit tests of this class
Pyro5/serializers.py
__eq__
gst/Pyro5
python
def __eq__(self, other): return (isinstance(other, SerializerBase) and (vars(self) == vars(other)))
@staticmethod def get_classes_from_module(module: ModuleType) -> Dict[(str, Type[Any])]: '\n Find classes inside a python module file.\n ' try: return {name: cls for (name, cls) in module.__dict__.items() if isinstance(cls, type)} except AttributeError: log.warning('Could not find any class in module {}', module) return {}
-4,971,437,206,064,173,000
Find classes inside a python module file.
restapi/utilities/meta.py
get_classes_from_module
rapydo/http-api
python
@staticmethod def get_classes_from_module(module: ModuleType) -> Dict[(str, Type[Any])]: '\n \n ' try: return {name: cls for (name, cls) in module.__dict__.items() if isinstance(cls, type)} except AttributeError: log.warning('Could not find any class in module {}', module) return {}
@staticmethod def get_new_classes_from_module(module: ModuleType) -> Dict[(str, Type[Any])]: '\n Skip classes not originated inside the module.\n ' classes = {} for (name, value) in Meta.get_classes_from_module(module).items(): if (module.__name__ in value.__module__): classes[name] = value return classes
182,685,707,624,112,300
Skip classes not originated inside the module.
restapi/utilities/meta.py
get_new_classes_from_module
rapydo/http-api
python
@staticmethod def get_new_classes_from_module(module: ModuleType) -> Dict[(str, Type[Any])]: '\n \n ' classes = {} for (name, value) in Meta.get_classes_from_module(module).items(): if (module.__name__ in value.__module__): classes[name] = value return classes
@staticmethod def get_module_from_string(modulestring: str, exit_on_fail: bool=False) -> Optional[ModuleType]: '\n Getting a module import\n when your module is stored as a string in a variable\n ' try: return import_module(modulestring) except ModuleNotFoundError as e: if exit_on_fail: log.error(e) raise e return None except Exception as e: if exit_on_fail: log.error(e) raise e log.error('Module {} not found.\nError: {}', modulestring, e) return None
7,899,737,331,426,552,000
Getting a module import when your module is stored as a string in a variable
restapi/utilities/meta.py
get_module_from_string
rapydo/http-api
python
@staticmethod def get_module_from_string(modulestring: str, exit_on_fail: bool=False) -> Optional[ModuleType]: '\n Getting a module import\n when your module is stored as a string in a variable\n ' try: return import_module(modulestring) except ModuleNotFoundError as e: if exit_on_fail: log.error(e) raise e return None except Exception as e: if exit_on_fail: log.error(e) raise e log.error('Module {} not found.\nError: {}', modulestring, e) return None
@staticmethod def get_self_reference_from_args(*args: Any) -> Optional[Any]: "\n Useful in decorators:\n being able to call the internal method by getting\n the 'self' reference from the decorated method\n (when it's there)\n " if (len(args) > 0): candidate_as_self = args[0] cls_attribute = getattr(candidate_as_self, '__class__', None) if ((cls_attribute is not None) and inspect.isclass(cls_attribute)): return args[0] return None
5,626,463,926,865,336,000
Useful in decorators: being able to call the internal method by getting the 'self' reference from the decorated method (when it's there)
restapi/utilities/meta.py
get_self_reference_from_args
rapydo/http-api
python
@staticmethod def get_self_reference_from_args(*args: Any) -> Optional[Any]: "\n Useful in decorators:\n being able to call the internal method by getting\n the 'self' reference from the decorated method\n (when it's there)\n " if (len(args) > 0): candidate_as_self = args[0] cls_attribute = getattr(candidate_as_self, '__class__', None) if ((cls_attribute is not None) and inspect.isclass(cls_attribute)): return args[0] return None
@staticmethod def get_celery_tasks(package_name: str) -> List[Callable[(..., Any)]]: '\n Extract all celery tasks from a module.\n Celery tasks are functions decorated by @CeleryExt.celery_app.task(...)\n This decorator transform the function into a class child of\n celery.local.PromiseProxy\n ' tasks: List[Callable[(..., Any)]] = [] package = Meta.get_module_from_string(package_name) if (package is None): return tasks path = package.__path__ for (_, module_name, ispkg) in pkgutil.iter_modules(path): if ispkg: continue module_path = f'{package_name}.{module_name}' log.debug("Loading module '{}'", module_path) submodule = Meta.get_module_from_string(module_path, exit_on_fail=True) functions = inspect.getmembers(submodule) for func in functions: obj_type = type(func[1]) if (obj_type.__module__ != 'celery.local'): continue tasks.append(func[1]) return tasks
-1,080,608,678,264,604,900
Extract all celery tasks from a module. Celery tasks are functions decorated by @CeleryExt.celery_app.task(...) This decorator transform the function into a class child of celery.local.PromiseProxy
restapi/utilities/meta.py
get_celery_tasks
rapydo/http-api
python
@staticmethod def get_celery_tasks(package_name: str) -> List[Callable[(..., Any)]]: '\n Extract all celery tasks from a module.\n Celery tasks are functions decorated by @CeleryExt.celery_app.task(...)\n This decorator transform the function into a class child of\n celery.local.PromiseProxy\n ' tasks: List[Callable[(..., Any)]] = [] package = Meta.get_module_from_string(package_name) if (package is None): return tasks path = package.__path__ for (_, module_name, ispkg) in pkgutil.iter_modules(path): if ispkg: continue module_path = f'{package_name}.{module_name}' log.debug("Loading module '{}'", module_path) submodule = Meta.get_module_from_string(module_path, exit_on_fail=True) functions = inspect.getmembers(submodule) for func in functions: obj_type = type(func[1]) if (obj_type.__module__ != 'celery.local'): continue tasks.append(func[1]) return tasks
def __init__(self): '\n\t\tCreates the himesis graph representing the AToM3 model HUnitR06_IsolatedLHS\n\t\t' self.is_compiled = True super(HUnitR06_IsolatedLHS, self).__init__(name='HUnitR06_IsolatedLHS', num_nodes=0, edges=[]) self.add_edges([]) self['mm__'] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule'] self['MT_constraint__'] = 'return True' self['name'] = '' self['GUID__'] = uuid.uuid3(uuid.NAMESPACE_DNS, 'HUnitR06_IsolatedLHS') self['equations'] = [] self.add_node() self.vs[0]['MT_pre__attr1'] = 'return True' self.vs[0]['MT_label__'] = '1' self.vs[0]['mm__'] = 'MT_pre__PhysicalNode' self.vs[0]['GUID__'] = uuid.uuid3(uuid.NAMESPACE_DNS, '6.0.m.0PhysicalNode') self.add_node() self.vs[1]['MT_pre__attr1'] = 'return True' self.vs[1]['MT_label__'] = '2' self.vs[1]['mm__'] = 'MT_pre__Partition' self.vs[1]['GUID__'] = uuid.uuid3(uuid.NAMESPACE_DNS, '6.0.m.1Partition')
-398,784,226,369,132,100
Creates the himesis graph representing the AToM3 model HUnitR06_IsolatedLHS
GM2AUTOSAR_MM/Properties/unit_contracts/HUnitR06_IsolatedLHS.py
__init__
levilucio/SyVOLT
python
def __init__(self): '\n\t\t\n\t\t' self.is_compiled = True super(HUnitR06_IsolatedLHS, self).__init__(name='HUnitR06_IsolatedLHS', num_nodes=0, edges=[]) self.add_edges([]) self['mm__'] = ['MT_pre__FamiliesToPersonsMM', 'MoTifRule'] self['MT_constraint__'] = 'return True' self['name'] = self['GUID__'] = uuid.uuid3(uuid.NAMESPACE_DNS, 'HUnitR06_IsolatedLHS') self['equations'] = [] self.add_node() self.vs[0]['MT_pre__attr1'] = 'return True' self.vs[0]['MT_label__'] = '1' self.vs[0]['mm__'] = 'MT_pre__PhysicalNode' self.vs[0]['GUID__'] = uuid.uuid3(uuid.NAMESPACE_DNS, '6.0.m.0PhysicalNode') self.add_node() self.vs[1]['MT_pre__attr1'] = 'return True' self.vs[1]['MT_label__'] = '2' self.vs[1]['mm__'] = 'MT_pre__Partition' self.vs[1]['GUID__'] = uuid.uuid3(uuid.NAMESPACE_DNS, '6.0.m.1Partition')
def set_trace(self, frame=None, as_breakpoint=None, paused=True): "Start debugging from `frame`.\n\n If frame is not specified, debugging starts from caller's frame.\n\n Unlike Bdb.set_trace(), this does not call self.reset(), which causes\n the debugger to enter bdb source code. This also implements treating\n set_trace() calls as breakpoints in the PuDB UI.\n\n If as_breakpoint=True (the default), this call will be treated like a\n breakpoint in the UI (you can press 'b' on it to disable breaking\n here).\n\n If paused=False, the debugger will not break here.\n " if (as_breakpoint is None): if (not paused): as_breakpoint = False else: as_breakpoint = True if (frame is None): frame = thisframe = sys._getframe().f_back else: thisframe = frame while frame: frame.f_trace = self.trace_dispatch self.botframe = frame frame = frame.f_back thisframe_info = (self.canonic(thisframe.f_code.co_filename), thisframe.f_lineno) if ((thisframe_info not in self.set_traces) or self.set_traces[thisframe_info]): if as_breakpoint: self.set_traces[thisframe_info] = True if (self.ui.source_code_provider is not None): self.ui.set_source_code_provider(self.ui.source_code_provider, force_update=True) if paused: self.set_step() else: self.set_continue() sys.settrace(self.trace_dispatch) else: return
560,936,211,875,106,400
Start debugging from `frame`. If frame is not specified, debugging starts from caller's frame. Unlike Bdb.set_trace(), this does not call self.reset(), which causes the debugger to enter bdb source code. This also implements treating set_trace() calls as breakpoints in the PuDB UI. If as_breakpoint=True (the default), this call will be treated like a breakpoint in the UI (you can press 'b' on it to disable breaking here). If paused=False, the debugger will not break here.
pudb/debugger.py
set_trace
chrisamow/pudb
python
def set_trace(self, frame=None, as_breakpoint=None, paused=True): "Start debugging from `frame`.\n\n If frame is not specified, debugging starts from caller's frame.\n\n Unlike Bdb.set_trace(), this does not call self.reset(), which causes\n the debugger to enter bdb source code. This also implements treating\n set_trace() calls as breakpoints in the PuDB UI.\n\n If as_breakpoint=True (the default), this call will be treated like a\n breakpoint in the UI (you can press 'b' on it to disable breaking\n here).\n\n If paused=False, the debugger will not break here.\n " if (as_breakpoint is None): if (not paused): as_breakpoint = False else: as_breakpoint = True if (frame is None): frame = thisframe = sys._getframe().f_back else: thisframe = frame while frame: frame.f_trace = self.trace_dispatch self.botframe = frame frame = frame.f_back thisframe_info = (self.canonic(thisframe.f_code.co_filename), thisframe.f_lineno) if ((thisframe_info not in self.set_traces) or self.set_traces[thisframe_info]): if as_breakpoint: self.set_traces[thisframe_info] = True if (self.ui.source_code_provider is not None): self.ui.set_source_code_provider(self.ui.source_code_provider, force_update=True) if paused: self.set_step() else: self.set_continue() sys.settrace(self.trace_dispatch) else: return
def user_call(self, frame, argument_list): 'This method is called when there is the remote possibility\n that we ever need to stop in this function.' if self._wait_for_mainpyfile: return if self.stop_here(frame): self.interaction(frame)
-5,955,718,721,955,943,000
This method is called when there is the remote possibility that we ever need to stop in this function.
pudb/debugger.py
user_call
chrisamow/pudb
python
def user_call(self, frame, argument_list): 'This method is called when there is the remote possibility\n that we ever need to stop in this function.' if self._wait_for_mainpyfile: return if self.stop_here(frame): self.interaction(frame)
def user_line(self, frame): 'This function is called when we stop or break at this line.' if ('__exc_tuple__' in frame.f_locals): del frame.f_locals['__exc_tuple__'] if self._wait_for_mainpyfile: if ((self.mainpyfile != self.canonic(frame.f_code.co_filename)) or (frame.f_lineno <= 0)): return self._wait_for_mainpyfile = False self.bottom_frame = frame if self.get_break(self.canonic(frame.f_code.co_filename), frame.f_lineno): self.current_bp = (self.canonic(frame.f_code.co_filename), frame.f_lineno) else: self.current_bp = None try: self.ui.update_breakpoints() self.interaction(frame) except Exception: self.ui.show_internal_exc_dlg(sys.exc_info())
-4,471,584,001,692,289,500
This function is called when we stop or break at this line.
pudb/debugger.py
user_line
chrisamow/pudb
python
def user_line(self, frame): if ('__exc_tuple__' in frame.f_locals): del frame.f_locals['__exc_tuple__'] if self._wait_for_mainpyfile: if ((self.mainpyfile != self.canonic(frame.f_code.co_filename)) or (frame.f_lineno <= 0)): return self._wait_for_mainpyfile = False self.bottom_frame = frame if self.get_break(self.canonic(frame.f_code.co_filename), frame.f_lineno): self.current_bp = (self.canonic(frame.f_code.co_filename), frame.f_lineno) else: self.current_bp = None try: self.ui.update_breakpoints() self.interaction(frame) except Exception: self.ui.show_internal_exc_dlg(sys.exc_info())
def user_return(self, frame, return_value): 'This function is called when a return trap is set here.' if (frame.f_code.co_name != '<module>'): frame.f_locals['__return__'] = return_value if self._wait_for_mainpyfile: if ((self.mainpyfile != self.canonic(frame.f_code.co_filename)) or (frame.f_lineno <= 0)): return self._wait_for_mainpyfile = False self.bottom_frame = frame if ('__exc_tuple__' not in frame.f_locals): self.interaction(frame)
105,449,302,553,384,080
This function is called when a return trap is set here.
pudb/debugger.py
user_return
chrisamow/pudb
python
def user_return(self, frame, return_value): if (frame.f_code.co_name != '<module>'): frame.f_locals['__return__'] = return_value if self._wait_for_mainpyfile: if ((self.mainpyfile != self.canonic(frame.f_code.co_filename)) or (frame.f_lineno <= 0)): return self._wait_for_mainpyfile = False self.bottom_frame = frame if ('__exc_tuple__' not in frame.f_locals): self.interaction(frame)
def user_exception(self, frame, exc_tuple): 'This function is called if an exception occurs,\n but only if we are to stop at or just below this level.' frame.f_locals['__exc_tuple__'] = exc_tuple if (not self._wait_for_mainpyfile): self.interaction(frame, exc_tuple)
6,194,993,105,355,423,000
This function is called if an exception occurs, but only if we are to stop at or just below this level.
pudb/debugger.py
user_exception
chrisamow/pudb
python
def user_exception(self, frame, exc_tuple): 'This function is called if an exception occurs,\n but only if we are to stop at or just below this level.' frame.f_locals['__exc_tuple__'] = exc_tuple if (not self._wait_for_mainpyfile): self.interaction(frame, exc_tuple)
def signal_init(self): 'Initialize signal handler, ignoring errors silently.' try: super().signal_init() except ValueError: pass
6,588,031,780,759,060,000
Initialize signal handler, ignoring errors silently.
pudb/debugger.py
signal_init
chrisamow/pudb
python
def signal_init(self): try: super().signal_init() except ValueError: pass
def signal_restore(self): 'Restore default signal handler, ignoring errors silently.' try: super().signal_restore() except ValueError: pass
5,837,801,262,573,254,000
Restore default signal handler, ignoring errors silently.
pudb/debugger.py
signal_restore
chrisamow/pudb
python
def signal_restore(self): try: super().signal_restore() except ValueError: pass
def show_line(self, line, source_code_provider=None): 'Updates the UI so that a certain line is currently in view.' changed_file = False if (source_code_provider is not None): changed_file = (self.source_code_provider != source_code_provider) self.set_source_code_provider(source_code_provider) line -= 1 if ((line >= 0) and (line < len(self.source))): self.source_list.set_focus(line) if changed_file: self.source_list.set_focus_valign('middle')
-1,229,994,139,835,825,000
Updates the UI so that a certain line is currently in view.
pudb/debugger.py
show_line
chrisamow/pudb
python
def show_line(self, line, source_code_provider=None): changed_file = False if (source_code_provider is not None): changed_file = (self.source_code_provider != source_code_provider) self.set_source_code_provider(source_code_provider) line -= 1 if ((line >= 0) and (line < len(self.source))): self.source_list.set_focus(line) if changed_file: self.source_list.set_focus_valign('middle')
def set_current_line(self, line, source_code_provider): 'Updates the UI to show the line currently being executed.' if (self.current_line is not None): self.current_line.set_current(False) self.show_line(line, source_code_provider) line -= 1 if ((line >= 0) and (line < len(self.source))): self.current_line = self.source[line] self.current_line.set_current(True)
-738,099,850,055,954,800
Updates the UI to show the line currently being executed.
pudb/debugger.py
set_current_line
chrisamow/pudb
python
def set_current_line(self, line, source_code_provider): if (self.current_line is not None): self.current_line.set_current(False) self.show_line(line, source_code_provider) line -= 1 if ((line >= 0) and (line < len(self.source))): self.current_line = self.source[line] self.current_line.set_current(True)
@support.cpython_only def test_peephole_opt_unreachable_code_array_access_in_bounds(self): 'Regression test for issue35193 when run under clang msan.' def unused_code_at_end(): return 3 raise RuntimeError('unreachable') self.assertEqual('RETURN_VALUE', list(dis.get_instructions(unused_code_at_end))[(- 1)].opname)
-550,594,875,470,589,800
Regression test for issue35193 when run under clang msan.
examples/wagipython/wagi-python/opt/wasi-python/lib/python3.11/test/test_compile.py
test_peephole_opt_unreachable_code_array_access_in_bounds
ScriptBox99/wagi-dotnet
python
@support.cpython_only def test_peephole_opt_unreachable_code_array_access_in_bounds(self): def unused_code_at_end(): return 3 raise RuntimeError('unreachable') self.assertEqual('RETURN_VALUE', list(dis.get_instructions(unused_code_at_end))[(- 1)].opname)
def f1(): 'docstring' return 42
2,956,622,419,990,045,700
docstring
examples/wagipython/wagi-python/opt/wasi-python/lib/python3.11/test/test_compile.py
f1
ScriptBox99/wagi-dotnet
python
def f1(): return 42
def no_code1(): 'doc string'
-978,215,992,770,197,000
doc string
examples/wagipython/wagi-python/opt/wasi-python/lib/python3.11/test/test_compile.py
no_code1
ScriptBox99/wagi-dotnet
python
def no_code1():
@pytest.fixture(scope='session', autouse=True) def mock_get_event_queryset_no_job_created(): "\n SQLite friendly since partitions aren't supported. Do not add the faked job_created field to the filter. If we do, it will result in an sql query for the\n job_created field. That field does not actually exist in a non-partition scenario.\n " def event_qs(self): kwargs = {self.event_parent_key: self.id} return self.event_class.objects.filter(**kwargs) with mock.patch.object(UnifiedJob, 'get_event_queryset', (lambda self: event_qs(self))) as _fixture: (yield _fixture)
6,119,196,560,993,237,000
SQLite friendly since partitions aren't supported. Do not add the faked job_created field to the filter. If we do, it will result in an sql query for the job_created field. That field does not actually exist in a non-partition scenario.
awx/main/tests/conftest.py
mock_get_event_queryset_no_job_created
AdamVB/awx
python
@pytest.fixture(scope='session', autouse=True) def mock_get_event_queryset_no_job_created(): "\n SQLite friendly since partitions aren't supported. Do not add the faked job_created field to the filter. If we do, it will result in an sql query for the\n job_created field. That field does not actually exist in a non-partition scenario.\n " def event_qs(self): kwargs = {self.event_parent_key: self.id} return self.event_class.objects.filter(**kwargs) with mock.patch.object(UnifiedJob, 'get_event_queryset', (lambda self: event_qs(self))) as _fixture: (yield _fixture)
def rf(persisted): 'Returns job with linked JT survey with password survey questions' objects = job_template_factory('jt', organization='org1', survey=[{'variable': 'submitter_email', 'type': 'text', 'default': '[email protected]'}, {'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'}, {'variable': 'SSN', 'type': 'password'}], persisted=persisted) return objects.job_template
7,814,057,701,853,962,000
Returns job with linked JT survey with password survey questions
awx/main/tests/conftest.py
rf
AdamVB/awx
python
def rf(persisted): objects = job_template_factory('jt', organization='org1', survey=[{'variable': 'submitter_email', 'type': 'text', 'default': '[email protected]'}, {'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'}, {'variable': 'SSN', 'type': 'password'}], persisted=persisted) return objects.job_template
def stack_seeds(n_row: int, n_col: int, seeds_row: Optional[Union[(np.ndarray, dict)]], seeds_col: Optional[Union[(np.ndarray, dict)]]=None, default_value: float=(- 1)) -> np.ndarray: 'Process seeds for rows and columns and stack the results into a single vector.' if ((seeds_row is None) and (seeds_col is None)): seeds_row = np.ones(n_row) seeds_col = (default_value * np.ones(n_col)) elif (seeds_row is None): seeds_row = (default_value * np.ones(n_row)) elif (seeds_col is None): seeds_col = (default_value * np.ones(n_col)) seeds_row = check_seeds(seeds_row, n_row) seeds_col = check_seeds(seeds_col, n_col) return np.hstack((seeds_row, seeds_col))
-9,066,853,132,596,986,000
Process seeds for rows and columns and stack the results into a single vector.
sknetwork/utils/seeds.py
stack_seeds
JulienSim001/scikit-network
python
def stack_seeds(n_row: int, n_col: int, seeds_row: Optional[Union[(np.ndarray, dict)]], seeds_col: Optional[Union[(np.ndarray, dict)]]=None, default_value: float=(- 1)) -> np.ndarray: if ((seeds_row is None) and (seeds_col is None)): seeds_row = np.ones(n_row) seeds_col = (default_value * np.ones(n_col)) elif (seeds_row is None): seeds_row = (default_value * np.ones(n_row)) elif (seeds_col is None): seeds_col = (default_value * np.ones(n_col)) seeds_row = check_seeds(seeds_row, n_row) seeds_col = check_seeds(seeds_col, n_col) return np.hstack((seeds_row, seeds_col))
def seeds2probs(n: int, seeds: Union[(dict, np.ndarray)]=None) -> np.ndarray: 'Transform seeds into probability vector.\n\n Parameters\n ----------\n n : int\n Total number of samples.\n seeds :\n If ``None``, the uniform distribution is used.\n Otherwise, a non-negative, non-zero vector or a dictionary must be provided.\n\n Returns\n -------\n probs: np.ndarray\n A probability vector.\n ' if (seeds is None): return (np.ones(n) / n) else: seeds = check_seeds(seeds, n) probs = np.zeros_like(seeds, dtype=float) ix = (seeds > 0) probs[ix] = seeds[ix] w: float = probs.sum() if (w > 0): return (probs / w) else: raise ValueError('At least one seeds must have a positive probability.')
2,715,893,919,102,163,500
Transform seeds into probability vector. Parameters ---------- n : int Total number of samples. seeds : If ``None``, the uniform distribution is used. Otherwise, a non-negative, non-zero vector or a dictionary must be provided. Returns ------- probs: np.ndarray A probability vector.
sknetwork/utils/seeds.py
seeds2probs
JulienSim001/scikit-network
python
def seeds2probs(n: int, seeds: Union[(dict, np.ndarray)]=None) -> np.ndarray: 'Transform seeds into probability vector.\n\n Parameters\n ----------\n n : int\n Total number of samples.\n seeds :\n If ``None``, the uniform distribution is used.\n Otherwise, a non-negative, non-zero vector or a dictionary must be provided.\n\n Returns\n -------\n probs: np.ndarray\n A probability vector.\n ' if (seeds is None): return (np.ones(n) / n) else: seeds = check_seeds(seeds, n) probs = np.zeros_like(seeds, dtype=float) ix = (seeds > 0) probs[ix] = seeds[ix] w: float = probs.sum() if (w > 0): return (probs / w) else: raise ValueError('At least one seeds must have a positive probability.')
@intrinsic def viewer(tyctx, val, viewty): " Bitcast a scalar 'val' to the given type 'viewty'. " bits = val.bitwidth if isinstance(viewty.dtype, types.Integer): bitcastty = ir.IntType(bits) elif isinstance(viewty.dtype, types.Float): bitcastty = (ir.FloatType() if (bits == 32) else ir.DoubleType()) else: assert 0, 'unreachable' def codegen(cgctx, builder, typ, args): flt = args[0] return builder.bitcast(flt, bitcastty) retty = viewty.dtype sig = retty(val, viewty) return (sig, codegen)
8,181,720,399,996,662,000
Bitcast a scalar 'val' to the given type 'viewty'.
numba/unsafe/numbers.py
viewer
Hardcode84/numba
python
@intrinsic def viewer(tyctx, val, viewty): " " bits = val.bitwidth if isinstance(viewty.dtype, types.Integer): bitcastty = ir.IntType(bits) elif isinstance(viewty.dtype, types.Float): bitcastty = (ir.FloatType() if (bits == 32) else ir.DoubleType()) else: assert 0, 'unreachable' def codegen(cgctx, builder, typ, args): flt = args[0] return builder.bitcast(flt, bitcastty) retty = viewty.dtype sig = retty(val, viewty) return (sig, codegen)
@intrinsic def trailing_zeros(typeingctx, src): 'Counts trailing zeros in the binary representation of an integer.' if (not isinstance(src, types.Integer)): raise TypeError("trailing_zeros is only defined for integers, but passed value was '{}'.".format(src)) def codegen(context, builder, signature, args): [src] = args return builder.cttz(src, ir.Constant(ir.IntType(1), 0)) return (src(src), codegen)
-2,407,326,293,384,501,000
Counts trailing zeros in the binary representation of an integer.
numba/unsafe/numbers.py
trailing_zeros
Hardcode84/numba
python
@intrinsic def trailing_zeros(typeingctx, src): if (not isinstance(src, types.Integer)): raise TypeError("trailing_zeros is only defined for integers, but passed value was '{}'.".format(src)) def codegen(context, builder, signature, args): [src] = args return builder.cttz(src, ir.Constant(ir.IntType(1), 0)) return (src(src), codegen)
@intrinsic def leading_zeros(typeingctx, src): 'Counts leading zeros in the binary representation of an integer.' if (not isinstance(src, types.Integer)): raise TypeError("leading_zeros is only defined for integers, but passed value was '{}'.".format(src)) def codegen(context, builder, signature, args): [src] = args return builder.ctlz(src, ir.Constant(ir.IntType(1), 0)) return (src(src), codegen)
6,071,251,006,695,300,000
Counts leading zeros in the binary representation of an integer.
numba/unsafe/numbers.py
leading_zeros
Hardcode84/numba
python
@intrinsic def leading_zeros(typeingctx, src): if (not isinstance(src, types.Integer)): raise TypeError("leading_zeros is only defined for integers, but passed value was '{}'.".format(src)) def codegen(context, builder, signature, args): [src] = args return builder.ctlz(src, ir.Constant(ir.IntType(1), 0)) return (src(src), codegen)
def bcp(sql_item: str, direction: str, flat_file: str, creds, sql_type: str='table', schema: str='dbo', format_file_path: str=None, batch_size: int=None, col_delimiter: str=None, row_terminator: str=None, bcp_path: Union[(str, Path)]=None, error_file_path: str=None): '\n See https://docs.microsoft.com/en-us/sql/tools/bcp-utility\n ' combos = {TABLE: [IN, OUT], QUERY: [QUERYOUT], VIEW: [IN, OUT]} direc = direction.lower() if (direc not in DIRECTIONS): raise BCPandasValueError(f"Param 'direction' must be one of {DIRECTIONS}, you passed {direc}") if (direc not in combos[sql_type]): raise BCPandasValueError(f'Wrong combo of direction and SQL object, you passed {sql_type} and {direc} .') if creds.with_krb_auth: auth = ['-T'] else: auth = ['-U', creds.username, '-P', creds.password] if (sql_type == QUERY): sql_item_string = quote_this(''.join(sql_item.splitlines())) else: sql_item_string = f'{schema}.{sql_item}' bcp_command = ([('bcp' if (bcp_path is None) else quote_this(str(bcp_path))), sql_item_string, direc, flat_file, '-S', creds.server, '-d', creds.database, '-q', '-e', error_file_path] + auth) if batch_size: bcp_command += ['-b', str(batch_size)] if (direc == IN): bcp_command += ['-f', format_file_path] elif (direc in (OUT, QUERYOUT)): bcp_command += ['-c', quote_this(f"-t{(read_data_settings['delimiter'] if (col_delimiter is None) else col_delimiter)}"), quote_this(f"-r{(read_data_settings['newline'] if (row_terminator is None) else row_terminator)}")] bcp_command_log = [(c if (c != creds.password) else '[REDACTED]') for c in bcp_command] logger.info(f'''Executing BCP command now... BCP command is: {bcp_command_log}''') ret_code = run_cmd(bcp_command) if ret_code: raise BCPandasException(f'Bcp command failed with exit code {ret_code}')
7,696,618,202,439,960,000
See https://docs.microsoft.com/en-us/sql/tools/bcp-utility
bcpandas/utils.py
bcp
alon-r/bcpandas
python
def bcp(sql_item: str, direction: str, flat_file: str, creds, sql_type: str='table', schema: str='dbo', format_file_path: str=None, batch_size: int=None, col_delimiter: str=None, row_terminator: str=None, bcp_path: Union[(str, Path)]=None, error_file_path: str=None): '\n \n ' combos = {TABLE: [IN, OUT], QUERY: [QUERYOUT], VIEW: [IN, OUT]} direc = direction.lower() if (direc not in DIRECTIONS): raise BCPandasValueError(f"Param 'direction' must be one of {DIRECTIONS}, you passed {direc}") if (direc not in combos[sql_type]): raise BCPandasValueError(f'Wrong combo of direction and SQL object, you passed {sql_type} and {direc} .') if creds.with_krb_auth: auth = ['-T'] else: auth = ['-U', creds.username, '-P', creds.password] if (sql_type == QUERY): sql_item_string = quote_this(.join(sql_item.splitlines())) else: sql_item_string = f'{schema}.{sql_item}' bcp_command = ([('bcp' if (bcp_path is None) else quote_this(str(bcp_path))), sql_item_string, direc, flat_file, '-S', creds.server, '-d', creds.database, '-q', '-e', error_file_path] + auth) if batch_size: bcp_command += ['-b', str(batch_size)] if (direc == IN): bcp_command += ['-f', format_file_path] elif (direc in (OUT, QUERYOUT)): bcp_command += ['-c', quote_this(f"-t{(read_data_settings['delimiter'] if (col_delimiter is None) else col_delimiter)}"), quote_this(f"-r{(read_data_settings['newline'] if (row_terminator is None) else row_terminator)}")] bcp_command_log = [(c if (c != creds.password) else '[REDACTED]') for c in bcp_command] logger.info(f'Executing BCP command now... BCP command is: {bcp_command_log}') ret_code = run_cmd(bcp_command) if ret_code: raise BCPandasException(f'Bcp command failed with exit code {ret_code}')
def get_temp_file() -> str: '\n Returns full path to a temporary file without creating it.\n ' tmp_dir = tempfile.gettempdir() file_path = os.path.join(tmp_dir, ''.join(random.choices((string.ascii_letters + string.digits), k=21))) return file_path
8,124,292,536,605,525,000
Returns full path to a temporary file without creating it.
bcpandas/utils.py
get_temp_file
alon-r/bcpandas
python
def get_temp_file() -> str: '\n \n ' tmp_dir = tempfile.gettempdir() file_path = os.path.join(tmp_dir, .join(random.choices((string.ascii_letters + string.digits), k=21))) return file_path
def _escape(input_string: str) -> str: '\n Adopted from https://github.com/titan550/bcpy/blob/master/bcpy/format_file_builder.py#L25\n ' return input_string.replace('"', '\\"').replace("'", "\\'").replace('\r', '\\r').replace('\n', '\\n')
5,986,205,927,583,255,000
Adopted from https://github.com/titan550/bcpy/blob/master/bcpy/format_file_builder.py#L25
bcpandas/utils.py
_escape
alon-r/bcpandas
python
def _escape(input_string: str) -> str: '\n \n ' return input_string.replace('"', '\\"').replace("'", "\\'").replace('\r', '\\r').replace('\n', '\\n')
def build_format_file(df: pd.DataFrame, delimiter: str, db_cols_order: Optional[Dict[(str, int)]]=None) -> str: "\n Creates the non-xml SQL format file. Puts 4 spaces between each section.\n See https://docs.microsoft.com/en-us/sql/relational-databases/import-export/non-xml-format-files-sql-server\n for the specification of the file.\n\n # TODO add params/options to control:\n # - the char type (not just SQLCHAR),\n\n Parameters\n ----------\n df : pandas DataFrame\n delimiter : a valid delimiter character\n db_cols_order : dict, optional\n Dict of {database column name -> ordinal position of the column}.\n Maps existing columns in the database to their ordinal position, i.e. the order of the columns in the db table.\n 1-indexed, so the first columns is 1, second is 2, etc.\n Only needed if the order of the columns in the dataframe doesn't match the database.\n\n Returns\n -------\n A string containing the format file\n " _space = (' ' * 4) format_file_str = f'''9.0 {len(df.columns)} ''' for (col_num, col_name) in enumerate(df.columns, start=1): _delim = (delimiter if (col_num != len(df.columns)) else NEWLINE) _line = _space.join([str(col_num), SQLCHAR, str(0), str(0), f'"{_escape(_delim)}"', str((col_num if (not db_cols_order) else db_cols_order[str(col_name)])), str(col_name), sql_collation, '\n']) format_file_str += _line return format_file_str
1,567,611,070,143,142,100
Creates the non-xml SQL format file. Puts 4 spaces between each section. See https://docs.microsoft.com/en-us/sql/relational-databases/import-export/non-xml-format-files-sql-server for the specification of the file. # TODO add params/options to control: # - the char type (not just SQLCHAR), Parameters ---------- df : pandas DataFrame delimiter : a valid delimiter character db_cols_order : dict, optional Dict of {database column name -> ordinal position of the column}. Maps existing columns in the database to their ordinal position, i.e. the order of the columns in the db table. 1-indexed, so the first columns is 1, second is 2, etc. Only needed if the order of the columns in the dataframe doesn't match the database. Returns ------- A string containing the format file
bcpandas/utils.py
build_format_file
alon-r/bcpandas
python
def build_format_file(df: pd.DataFrame, delimiter: str, db_cols_order: Optional[Dict[(str, int)]]=None) -> str: "\n Creates the non-xml SQL format file. Puts 4 spaces between each section.\n See https://docs.microsoft.com/en-us/sql/relational-databases/import-export/non-xml-format-files-sql-server\n for the specification of the file.\n\n # TODO add params/options to control:\n # - the char type (not just SQLCHAR),\n\n Parameters\n ----------\n df : pandas DataFrame\n delimiter : a valid delimiter character\n db_cols_order : dict, optional\n Dict of {database column name -> ordinal position of the column}.\n Maps existing columns in the database to their ordinal position, i.e. the order of the columns in the db table.\n 1-indexed, so the first columns is 1, second is 2, etc.\n Only needed if the order of the columns in the dataframe doesn't match the database.\n\n Returns\n -------\n A string containing the format file\n " _space = (' ' * 4) format_file_str = f'9.0 {len(df.columns)} ' for (col_num, col_name) in enumerate(df.columns, start=1): _delim = (delimiter if (col_num != len(df.columns)) else NEWLINE) _line = _space.join([str(col_num), SQLCHAR, str(0), str(0), f'"{_escape(_delim)}"', str((col_num if (not db_cols_order) else db_cols_order[str(col_name)])), str(col_name), sql_collation, '\n']) format_file_str += _line return format_file_str
def quote_this(this: str, skip: bool=False) -> str: "\n OS-safe way to quote a string.\n\n Returns the string with quotes around it.\n On Windows ~~it's double quotes~~ we skip quoting, \n on Linux it's single quotes.\n " if isinstance(this, str): if IS_WIN32: return this else: return shlex.quote(this) else: return this
694,796,248,036,194,300
OS-safe way to quote a string. Returns the string with quotes around it. On Windows ~~it's double quotes~~ we skip quoting, on Linux it's single quotes.
bcpandas/utils.py
quote_this
alon-r/bcpandas
python
def quote_this(this: str, skip: bool=False) -> str: "\n OS-safe way to quote a string.\n\n Returns the string with quotes around it.\n On Windows ~~it's double quotes~~ we skip quoting, \n on Linux it's single quotes.\n " if isinstance(this, str): if IS_WIN32: return this else: return shlex.quote(this) else: return this
def run_cmd(cmd: List[str]) -> int: '\n Runs the given command. \n \n Prints STDOUT in real time, prints STDERR when command is complete, \n and logs both STDOUT and STDERR.\n\n Paramters\n ---------\n cmd : list of str\n The command to run, to be submitted to `subprocess.Popen()`\n\n Returns\n -------\n The exit code of the command\n ' if IS_WIN32: with_shell = False else: with_shell = True cmd = ' '.join(cmd) proc = Popen(cmd, stdout=PIPE, stderr=PIPE, encoding='utf-8', errors='utf-8', shell=with_shell) while True: outs = proc.stdout.readline() if outs: print(outs, end='') logger.info(outs) if ((proc.poll() is not None) and (outs == '')): break errs = proc.stderr.readlines() if errs: print(errs, end='') logger.error(errs) return proc.returncode
-6,838,483,858,176,108,000
Runs the given command. Prints STDOUT in real time, prints STDERR when command is complete, and logs both STDOUT and STDERR. Paramters --------- cmd : list of str The command to run, to be submitted to `subprocess.Popen()` Returns ------- The exit code of the command
bcpandas/utils.py
run_cmd
alon-r/bcpandas
python
def run_cmd(cmd: List[str]) -> int: '\n Runs the given command. \n \n Prints STDOUT in real time, prints STDERR when command is complete, \n and logs both STDOUT and STDERR.\n\n Paramters\n ---------\n cmd : list of str\n The command to run, to be submitted to `subprocess.Popen()`\n\n Returns\n -------\n The exit code of the command\n ' if IS_WIN32: with_shell = False else: with_shell = True cmd = ' '.join(cmd) proc = Popen(cmd, stdout=PIPE, stderr=PIPE, encoding='utf-8', errors='utf-8', shell=with_shell) while True: outs = proc.stdout.readline() if outs: print(outs, end=) logger.info(outs) if ((proc.poll() is not None) and (outs == )): break errs = proc.stderr.readlines() if errs: print(errs, end=) logger.error(errs) return proc.returncode
def get_E_Elc_microwave_d_t(P_Elc_microwave_cook_rtd, t_microwave_cook_d_t): '時刻別消費電力量を計算する\n \n Parameters\n ----------\n P_Elc_microwave_cook_rtd : float\n 調理時の定格待機電力, W\n \n t_microwave_cook_d_t : ndarray(N-dimensional array)\n 1年間の全時間の調理時間を格納したND配列, h\n d日t時の調理時間が年開始時から8760個連続して格納されている\n \n Returns\n ----------\n E_Elc_microwave_d_t : ndarray(N-dimensional array)\n 1年間の全時間の消費電力量を格納したND配列, Wh\n d日t時の消費電力量が年開始時から8760個連続して格納されている\n ' P_Elc_microwave_cook = get_P_Elc_microwave_cook(P_Elc_microwave_cook_rtd) E_Elc_microwave_d_t = (P_Elc_microwave_cook * t_microwave_cook_d_t) E_Elc_microwave_d_t = (E_Elc_microwave_d_t * (10 ** (- 3))) return E_Elc_microwave_d_t
824,538,783,144,946,400
時刻別消費電力量を計算する Parameters ---------- P_Elc_microwave_cook_rtd : float 調理時の定格待機電力, W t_microwave_cook_d_t : ndarray(N-dimensional array) 1年間の全時間の調理時間を格納したND配列, h d日t時の調理時間が年開始時から8760個連続して格納されている Returns ---------- E_Elc_microwave_d_t : ndarray(N-dimensional array) 1年間の全時間の消費電力量を格納したND配列, Wh d日t時の消費電力量が年開始時から8760個連続して格納されている
src/pyhees/section10_j1_f.py
get_E_Elc_microwave_d_t
jjj-design/pyhees
python
def get_E_Elc_microwave_d_t(P_Elc_microwave_cook_rtd, t_microwave_cook_d_t): '時刻別消費電力量を計算する\n \n Parameters\n ----------\n P_Elc_microwave_cook_rtd : float\n 調理時の定格待機電力, W\n \n t_microwave_cook_d_t : ndarray(N-dimensional array)\n 1年間の全時間の調理時間を格納したND配列, h\n d日t時の調理時間が年開始時から8760個連続して格納されている\n \n Returns\n ----------\n E_Elc_microwave_d_t : ndarray(N-dimensional array)\n 1年間の全時間の消費電力量を格納したND配列, Wh\n d日t時の消費電力量が年開始時から8760個連続して格納されている\n ' P_Elc_microwave_cook = get_P_Elc_microwave_cook(P_Elc_microwave_cook_rtd) E_Elc_microwave_d_t = (P_Elc_microwave_cook * t_microwave_cook_d_t) E_Elc_microwave_d_t = (E_Elc_microwave_d_t * (10 ** (- 3))) return E_Elc_microwave_d_t
def get_P_Elc_microwave_cook(P_Elc_microwave_rtd): '調理時の消費電力を計算する\n \n Parameters\n ----------\n P_Elc_microwave_cook_rtd : float\n 調理時の定格待機電力, W\n \n Returns\n ----------\n P_Elc_microwave_cook : float\n 調理時の消費電力, W\n ' P_Elc_microwave_cook = (0.9373 * P_Elc_microwave_rtd) return P_Elc_microwave_cook
8,751,485,143,819,558,000
調理時の消費電力を計算する Parameters ---------- P_Elc_microwave_cook_rtd : float 調理時の定格待機電力, W Returns ---------- P_Elc_microwave_cook : float 調理時の消費電力, W
src/pyhees/section10_j1_f.py
get_P_Elc_microwave_cook
jjj-design/pyhees
python
def get_P_Elc_microwave_cook(P_Elc_microwave_rtd): '調理時の消費電力を計算する\n \n Parameters\n ----------\n P_Elc_microwave_cook_rtd : float\n 調理時の定格待機電力, W\n \n Returns\n ----------\n P_Elc_microwave_cook : float\n 調理時の消費電力, W\n ' P_Elc_microwave_cook = (0.9373 * P_Elc_microwave_rtd) return P_Elc_microwave_cook
def test_hmac(self): '\n From http://en.wikipedia.org/wiki/Hash-based_message_authentication_code\n\n HMAC_SHA1("key", "The quick brown fox jumps over the lazy dog")\n = 0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9\n ' message = 'The quick brown fox jumps over the lazy dog' key = 'a2V5' signature = '3nybhbi3iqa8ino29wqQcBydtNk=' self.assertEqual(signature, _client.sign_hmac(key, message))
8,884,918,142,030,208,000
From http://en.wikipedia.org/wiki/Hash-based_message_authentication_code HMAC_SHA1("key", "The quick brown fox jumps over the lazy dog") = 0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9
test/test_client.py
test_hmac
Harkishen-Singh/google-maps-services-python
python
def test_hmac(self): '\n From http://en.wikipedia.org/wiki/Hash-based_message_authentication_code\n\n HMAC_SHA1("key", "The quick brown fox jumps over the lazy dog")\n = 0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9\n ' message = 'The quick brown fox jumps over the lazy dog' key = 'a2V5' signature = '3nybhbi3iqa8ino29wqQcBydtNk=' self.assertEqual(signature, _client.sign_hmac(key, message))
def __init__(self): 'Constructeur du paramètre.' Parametre.__init__(self, 'creer', 'create') self.schema = '<cle>' self.aide_courte = 'crée un banc de poisson' self.aide_longue = 'Cette commande permet de créer un nouveau banc de poisson. Vous devez préciser en argument la clé identifiant le banc.'
6,566,749,253,367,863,000
Constructeur du paramètre.
src/secondaires/peche/commandes/banc/creer.py
__init__
vincent-lg/tsunami
python
def __init__(self): Parametre.__init__(self, 'creer', 'create') self.schema = '<cle>' self.aide_courte = 'crée un banc de poisson' self.aide_longue = 'Cette commande permet de créer un nouveau banc de poisson. Vous devez préciser en argument la clé identifiant le banc.'
def interpreter(self, personnage, dic_masques): "Méthode d'interprétation de commande" cle = dic_masques['cle'].cle if (cle in importeur.peche.bancs): (personnage << '|err|Ce banc existe déjà.|ff|') return banc = importeur.peche.creer_banc(cle) editeur = importeur.interpreteur.construire_editeur('schooledit', personnage, banc) personnage.contextes.ajouter(editeur) editeur.actualiser()
-7,734,939,328,273,605,000
Méthode d'interprétation de commande
src/secondaires/peche/commandes/banc/creer.py
interpreter
vincent-lg/tsunami
python
def interpreter(self, personnage, dic_masques): cle = dic_masques['cle'].cle if (cle in importeur.peche.bancs): (personnage << '|err|Ce banc existe déjà.|ff|') return banc = importeur.peche.creer_banc(cle) editeur = importeur.interpreteur.construire_editeur('schooledit', personnage, banc) personnage.contextes.ajouter(editeur) editeur.actualiser()
def main(): 'Run administrative tasks.' os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'potato.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError("Couldn't import Django. Are you sure it's installed and available on your PYTHONPATH environment variable? Did you forget to activate a virtual environment?") from exc execute_from_command_line(sys.argv)
181,689,278,771,491,780
Run administrative tasks.
manage.py
main
aidswidjaja/PotatoBoard
python
def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'potato.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError("Couldn't import Django. Are you sure it's installed and available on your PYTHONPATH environment variable? Did you forget to activate a virtual environment?") from exc execute_from_command_line(sys.argv)
@pytest.mark.xfail(reason='OGR silently fails to convert strings') def test_write_mismatch(self): 'TOFIX: OGR silently fails to convert strings' schema = {'geometry': 'Point', 'properties': {'label': 'str', 'num': 'int'}} with fiona.open(os.path.join(self.tempdir, 'test-write-fail.shp'), 'w', driver='ESRI Shapefile', schema=schema, encoding='latin1') as c: c.writerecords([{'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': {'label': u'徐汇区', 'num': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='latin1') as c: f = next(iter(c)) self.assertEqual(f['properties']['label'], u'徐汇区')
-2,003,226,171,940,806,400
TOFIX: OGR silently fails to convert strings
tests/test_unicode.py
test_write_mismatch
Juanlu001/Fiona
python
@pytest.mark.xfail(reason='OGR silently fails to convert strings') def test_write_mismatch(self): schema = {'geometry': 'Point', 'properties': {'label': 'str', 'num': 'int'}} with fiona.open(os.path.join(self.tempdir, 'test-write-fail.shp'), 'w', driver='ESRI Shapefile', schema=schema, encoding='latin1') as c: c.writerecords([{'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': {'label': u'徐汇区', 'num': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='latin1') as c: f = next(iter(c)) self.assertEqual(f['properties']['label'], u'徐汇区')
def test_write_gb18030(self): 'Can write a simplified Chinese shapefile' schema = {'geometry': 'Point', 'properties': {'label': 'str', 'num': 'int'}} with fiona.open(os.path.join(self.tempdir, 'test-write-gb18030.shp'), 'w', driver='ESRI Shapefile', schema=schema, encoding='gb18030') as c: c.writerecords([{'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': {'label': u'徐汇区', 'num': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='gb18030') as c: f = next(iter(c)) self.assertEqual(f['properties']['label'], u'徐汇区') self.assertEqual(f['properties']['num'], 0)
-5,254,057,672,360,205,000
Can write a simplified Chinese shapefile
tests/test_unicode.py
test_write_gb18030
Juanlu001/Fiona
python
def test_write_gb18030(self): schema = {'geometry': 'Point', 'properties': {'label': 'str', 'num': 'int'}} with fiona.open(os.path.join(self.tempdir, 'test-write-gb18030.shp'), 'w', driver='ESRI Shapefile', schema=schema, encoding='gb18030') as c: c.writerecords([{'type': 'Feature', 'geometry': {'type': 'Point', 'coordinates': [0, 0]}, 'properties': {'label': u'徐汇区', 'num': 0}}]) with fiona.open(os.path.join(self.tempdir), encoding='gb18030') as c: f = next(iter(c)) self.assertEqual(f['properties']['label'], u'徐汇区') self.assertEqual(f['properties']['num'], 0)
def __init__(self): '\n Use __init__ to define parameter network needs\n ' self.shared_param_list = ['offset_p2', 'offset_p3', 'offset_p4', 'offset_p5', 'rpn_conv', 'rpn_cls_score', 'rpn_bbox_pred'] self.shared_param_dict = {} for name in self.shared_param_list: self.shared_param_dict[(name + '_weight')] = mx.sym.Variable((name + '_weight')) self.shared_param_dict[(name + '_bias')] = mx.sym.Variable((name + '_bias'))
1,378,601,055,576,555,500
Use __init__ to define parameter network needs
fpn/symbols/resnet_v1_101_fpn_dcn_rcnn.py
__init__
chi3x10/RepMet
python
def __init__(self): '\n \n ' self.shared_param_list = ['offset_p2', 'offset_p3', 'offset_p4', 'offset_p5', 'rpn_conv', 'rpn_cls_score', 'rpn_bbox_pred'] self.shared_param_dict = {} for name in self.shared_param_list: self.shared_param_dict[(name + '_weight')] = mx.sym.Variable((name + '_weight')) self.shared_param_dict[(name + '_bias')] = mx.sym.Variable((name + '_bias'))
def sort_human(l): 'Sort a list of strings by numerical.' def convert(text): return (float(text) if text.isdigit() else text) def alphanum(key): return [convert(c) for c in re.split('([-+]?[0-9]*\\.?[0-9]*)', key)] l.sort(key=alphanum) return l
-7,997,151,349,529,988,000
Sort a list of strings by numerical.
results_processing/ABC/csv_processing.py
sort_human
multimodalspectroscopy/hypothermia-bayescmd
python
def sort_human(l): def convert(text): return (float(text) if text.isdigit() else text) def alphanum(key): return [convert(c) for c in re.split('([-+]?[0-9]*\\.?[0-9]*)', key)] l.sort(key=alphanum) return l
def data_merge_by_batch(parent_directory, verbose=True): 'Merge a set of parameters.csv files into one.\n\n This is intended for use with batch processes from Legion, with each batch\n being 1000 runs longand numbered with integer values.\n\n Parameters\n ----------\n parent_directory : :obj:`list` of :obj:`str`\n Parent directory to a set of directories each containing model runs and\n a parameters.csv file.\n verbose : :obj:`boolean`, optional\n Boolean indicator of whether to print extra information.\n\n Returns\n -------\n None\n Concatenated will be written to file in `parent_directory`\n\n ' dirs = [os.path.abspath(os.path.join(parent_directory, d)) for d in os.listdir(parent_directory) if (os.path.isdir(os.path.abspath(os.path.join(parent_directory, d))) and (d != 'archives'))] dirs = sort_human(dirs) if verbose: print(dirs) dfs = [] for d in dirs: try: dfs.append(pd.read_csv(os.path.join(d, 'parameters.csv'))) ii = (len(dfs) - 1) print('Processing parameter file {}'.format(ii)) if (ii is not 0): dfs[ii]['ix'] = ((dfs[ii].index.values + dfs[(ii - 1)]['ix'].values[(- 1)]) + 1) else: dfs[ii]['ix'] = dfs[ii].index.values if os.path.split(d)[1].split('_')[(- 1)].isdigit(): print(os.path.split(d)[1].split('_')[(- 1)]) dfs[ii]['Batch'] = int(os.path.split(d)[1].split('_')[(- 1)]) else: print('Batch number not found for {}'.format(d)) continue except FileNotFoundError: print('No parameters file in {}'.format(d)) continue if verbose: print('{} dataframes to be joined'.format(len(dfs))) df = pd.concat(dfs) df.index = range(len(df)) output_file = os.path.join(parent_directory, 'all_parameters.csv') df.to_csv(output_file, index=False) return output_file
-9,222,835,480,730,805,000
Merge a set of parameters.csv files into one. This is intended for use with batch processes from Legion, with each batch being 1000 runs longand numbered with integer values. Parameters ---------- parent_directory : :obj:`list` of :obj:`str` Parent directory to a set of directories each containing model runs and a parameters.csv file. verbose : :obj:`boolean`, optional Boolean indicator of whether to print extra information. Returns ------- None Concatenated will be written to file in `parent_directory`
results_processing/ABC/csv_processing.py
data_merge_by_batch
multimodalspectroscopy/hypothermia-bayescmd
python
def data_merge_by_batch(parent_directory, verbose=True): 'Merge a set of parameters.csv files into one.\n\n This is intended for use with batch processes from Legion, with each batch\n being 1000 runs longand numbered with integer values.\n\n Parameters\n ----------\n parent_directory : :obj:`list` of :obj:`str`\n Parent directory to a set of directories each containing model runs and\n a parameters.csv file.\n verbose : :obj:`boolean`, optional\n Boolean indicator of whether to print extra information.\n\n Returns\n -------\n None\n Concatenated will be written to file in `parent_directory`\n\n ' dirs = [os.path.abspath(os.path.join(parent_directory, d)) for d in os.listdir(parent_directory) if (os.path.isdir(os.path.abspath(os.path.join(parent_directory, d))) and (d != 'archives'))] dirs = sort_human(dirs) if verbose: print(dirs) dfs = [] for d in dirs: try: dfs.append(pd.read_csv(os.path.join(d, 'parameters.csv'))) ii = (len(dfs) - 1) print('Processing parameter file {}'.format(ii)) if (ii is not 0): dfs[ii]['ix'] = ((dfs[ii].index.values + dfs[(ii - 1)]['ix'].values[(- 1)]) + 1) else: dfs[ii]['ix'] = dfs[ii].index.values if os.path.split(d)[1].split('_')[(- 1)].isdigit(): print(os.path.split(d)[1].split('_')[(- 1)]) dfs[ii]['Batch'] = int(os.path.split(d)[1].split('_')[(- 1)]) else: print('Batch number not found for {}'.format(d)) continue except FileNotFoundError: print('No parameters file in {}'.format(d)) continue if verbose: print('{} dataframes to be joined'.format(len(dfs))) df = pd.concat(dfs) df.index = range(len(df)) output_file = os.path.join(parent_directory, 'all_parameters.csv') df.to_csv(output_file, index=False) return output_file
def valid(args, model, writer, test_loader, global_step): '\n :param args: 参数Config\n :param model: 需验证模型\n :param writer: TB写入\n :param test_loader: 测试数据集\n :param global_step: 全局step\n :return:\n ' eval_losses = AverageMeter() model.eval() (all_preds, all_label) = ([], []) epoch_iterator = tqdm(test_loader, desc='Validating... (loss=X.X)', bar_format='{l_bar}{r_bar}', dynamic_ncols=True) loss_fct = torch.nn.CrossEntropyLoss() global_eval_step = 0 for (step, batch) in enumerate(epoch_iterator): global_eval_step += 1 batch = tuple((t.to(args.device) for t in batch)) (x, y) = batch with torch.no_grad(): logits = model(x)[0] eval_loss = loss_fct(logits, y) eval_losses.update(eval_loss.item()) preds = torch.argmax(logits, dim=(- 1)) if (len(all_preds) == 0): all_preds.append(preds.detach().cpu().numpy()) all_label.append(y.detach().cpu().numpy()) else: all_preds[0] = np.append(all_preds[0], preds.detach().cpu().numpy(), axis=0) all_label[0] = np.append(all_label[0], y.detach().cpu().numpy(), axis=0) epoch_iterator.set_description(('Validating... (loss=%2.5f)' % eval_losses.val)) writer.add_scalar('Train/loss', scalar_value=eval_losses.val, global_step=global_eval_step) (all_preds, all_label) = (all_preds[0], all_label[0]) accuracy = simple_accuracy(all_preds, all_label) writer.add_scalar('test/accuracy', scalar_value=accuracy, global_step=global_step) return accuracy
180,415,128,705,602,140
:param args: 参数Config :param model: 需验证模型 :param writer: TB写入 :param test_loader: 测试数据集 :param global_step: 全局step :return:
VIT/Train.py
valid
HzcIrving/DLRL-PlayGround
python
def valid(args, model, writer, test_loader, global_step): '\n :param args: 参数Config\n :param model: 需验证模型\n :param writer: TB写入\n :param test_loader: 测试数据集\n :param global_step: 全局step\n :return:\n ' eval_losses = AverageMeter() model.eval() (all_preds, all_label) = ([], []) epoch_iterator = tqdm(test_loader, desc='Validating... (loss=X.X)', bar_format='{l_bar}{r_bar}', dynamic_ncols=True) loss_fct = torch.nn.CrossEntropyLoss() global_eval_step = 0 for (step, batch) in enumerate(epoch_iterator): global_eval_step += 1 batch = tuple((t.to(args.device) for t in batch)) (x, y) = batch with torch.no_grad(): logits = model(x)[0] eval_loss = loss_fct(logits, y) eval_losses.update(eval_loss.item()) preds = torch.argmax(logits, dim=(- 1)) if (len(all_preds) == 0): all_preds.append(preds.detach().cpu().numpy()) all_label.append(y.detach().cpu().numpy()) else: all_preds[0] = np.append(all_preds[0], preds.detach().cpu().numpy(), axis=0) all_label[0] = np.append(all_label[0], y.detach().cpu().numpy(), axis=0) epoch_iterator.set_description(('Validating... (loss=%2.5f)' % eval_losses.val)) writer.add_scalar('Train/loss', scalar_value=eval_losses.val, global_step=global_eval_step) (all_preds, all_label) = (all_preds[0], all_label[0]) accuracy = simple_accuracy(all_preds, all_label) writer.add_scalar('test/accuracy', scalar_value=accuracy, global_step=global_step) return accuracy
def train(args=VITConfig()): '\n :param args:\n - log_dir\n ' pretrained_model_config = CONFIGS[args.model_type] num_classes = (10 if (args.dataset == 'cifar10') else 100) model = VITransModel(pretrained_model_config, args.img_size, zero_head=True, num_classes=num_classes) model.load_from(np.load(args.pretrained_dir)) model.to(device=args.device) num_params = count_parameters(model) if args.TB_log: os.makedirs(args.log_dir, exist_ok=True) writer = SummaryWriter(logdir=((args.log_dir + args.record_algo) + args.test_cycles)) args.train_batch_size = (args.train_batch_size // args.gradient_accumulation_steps) (train_loader, test_loader) = get_loader(args) optimizer = torch.optim.SGD(model.parameters(), lr=args.learning_rate, momentum=0.9, weight_decay=args.weight_decay) t_total = args.num_steps if (args.decay_type == 'cosine'): scheduler = WarmupCosineSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=t_total) else: scheduler = WarmupLinearSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=t_total) model.zero_grad() set_seed(args.seed) losses = AverageMeter() global_step = 0 best_acc = 0 while True: model.train() epoch_iterator = tqdm(train_loader, desc='Training (X / X Steps) (loss=X.X)', bar_format='{l_bar}{r_bar}', dynamic_ncols=True) for (step, batch) in enumerate(epoch_iterator): batch = tuple((t.to(args.device) for t in batch)) (x, y) = batch loss = model.forward(x, y) loss.backward() if (((step + 1) % args.gradient_accumulation_steps) == 0): losses.update((loss.item() * args.gradient_accumulation_steps)) torch.nn.utils.clip_grad_norm(model.parameters(), 1.0) scheduler.step() optimizer.step() optimizer.zero_grad() global_step += 1 epoch_iterator.set_description(('Training (%d / %d Steps) (loss=%2.5f)' % (global_step, t_total, losses.val))) writer.add_scalar('Train/loss', scalar_value=losses.val, global_step=global_step) writer.add_scalar('Train/lr', scalar_value=scheduler.get_lr()[0], global_step=global_step) if ((global_step % args.eval_every) 
== 0): accuracy = valid(args, model, writer, test_loader, global_step) if (best_acc < accuracy): best_acc = accuracy model_save((args.record_algo + args.test_cycles), model) model.train() if ((global_step % t_total) == 0): break losses.reset() if ((global_step % t_total) == 0): break writer.close() print(('===' * 30)) print(('Best Accuracy: \t%f' % best_acc)) print('End Training!') print(('===' * 30))
128,077,709,540,103,760
:param args: - log_dir
VIT/Train.py
train
HzcIrving/DLRL-PlayGround
python
def train(args=VITConfig()): '\n :param args:\n - log_dir\n ' pretrained_model_config = CONFIGS[args.model_type] num_classes = (10 if (args.dataset == 'cifar10') else 100) model = VITransModel(pretrained_model_config, args.img_size, zero_head=True, num_classes=num_classes) model.load_from(np.load(args.pretrained_dir)) model.to(device=args.device) num_params = count_parameters(model) if args.TB_log: os.makedirs(args.log_dir, exist_ok=True) writer = SummaryWriter(logdir=((args.log_dir + args.record_algo) + args.test_cycles)) args.train_batch_size = (args.train_batch_size // args.gradient_accumulation_steps) (train_loader, test_loader) = get_loader(args) optimizer = torch.optim.SGD(model.parameters(), lr=args.learning_rate, momentum=0.9, weight_decay=args.weight_decay) t_total = args.num_steps if (args.decay_type == 'cosine'): scheduler = WarmupCosineSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=t_total) else: scheduler = WarmupLinearSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=t_total) model.zero_grad() set_seed(args.seed) losses = AverageMeter() global_step = 0 best_acc = 0 while True: model.train() epoch_iterator = tqdm(train_loader, desc='Training (X / X Steps) (loss=X.X)', bar_format='{l_bar}{r_bar}', dynamic_ncols=True) for (step, batch) in enumerate(epoch_iterator): batch = tuple((t.to(args.device) for t in batch)) (x, y) = batch loss = model.forward(x, y) loss.backward() if (((step + 1) % args.gradient_accumulation_steps) == 0): losses.update((loss.item() * args.gradient_accumulation_steps)) torch.nn.utils.clip_grad_norm(model.parameters(), 1.0) scheduler.step() optimizer.step() optimizer.zero_grad() global_step += 1 epoch_iterator.set_description(('Training (%d / %d Steps) (loss=%2.5f)' % (global_step, t_total, losses.val))) writer.add_scalar('Train/loss', scalar_value=losses.val, global_step=global_step) writer.add_scalar('Train/lr', scalar_value=scheduler.get_lr()[0], global_step=global_step) if ((global_step % args.eval_every) 
== 0): accuracy = valid(args, model, writer, test_loader, global_step) if (best_acc < accuracy): best_acc = accuracy model_save((args.record_algo + args.test_cycles), model) model.train() if ((global_step % t_total) == 0): break losses.reset() if ((global_step % t_total) == 0): break writer.close() print(('===' * 30)) print(('Best Accuracy: \t%f' % best_acc)) print('End Training!') print(('===' * 30))
@pytest.fixture(scope='module') def tf_support(): 'Boolean fixture for TensorFlow support' try: import tensorflow as tf tf_support = True except ImportError as e: tf_support = False return tf_support
361,981,713,551,426,300
Boolean fixture for TensorFlow support
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
tf_support
MattePalte/Bugs-Quantum-Computing-Platforms
python
@pytest.fixture(scope='module') def tf_support(): try: import tensorflow as tf tf_support = True except ImportError as e: tf_support = False return tf_support
def test_qnode_fails_on_wrong_return_type(self, qubit_device_2_wires): 'The qfunc must return only Expectations' @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) return (qml.expval(qml.PauliZ(0)), 0.3) with pytest.raises(QuantumFunctionError, match='must return either'): qf(Variable(0.5))
-151,206,726,214,130,500
The qfunc must return only Expectations
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_wrong_return_type
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_wrong_return_type(self, qubit_device_2_wires): @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) return (qml.expval(qml.PauliZ(0)), 0.3) with pytest.raises(QuantumFunctionError, match='must return either'): qf(Variable(0.5))
def test_qnode_fails_on_expval_not_returned(self, qubit_device_2_wires): 'All expectation values in the qfunc must be returned' @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) ex = qml.expval(qml.PauliZ(1)) return qml.expval(qml.PauliZ(0)) with pytest.raises(QuantumFunctionError, match='All measured observables'): qf(Variable(0.5))
-7,956,166,682,982,606,000
All expectation values in the qfunc must be returned
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_expval_not_returned
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_expval_not_returned(self, qubit_device_2_wires): @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) ex = qml.expval(qml.PauliZ(1)) return qml.expval(qml.PauliZ(0)) with pytest.raises(QuantumFunctionError, match='All measured observables'): qf(Variable(0.5))
def test_qnode_fails_on_wrong_expval_order(self, qubit_device_2_wires): 'Expvals must be returned in the order they were created in' @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) ex = qml.expval(qml.PauliZ(1)) return (qml.expval(qml.PauliZ(0)), ex) with pytest.raises(QuantumFunctionError, match='All measured observables'): qf(Variable(0.5))
-5,130,039,859,913,704,000
Expvals must be returned in the order they were created in
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_wrong_expval_order
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_wrong_expval_order(self, qubit_device_2_wires): @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) ex = qml.expval(qml.PauliZ(1)) return (qml.expval(qml.PauliZ(0)), ex) with pytest.raises(QuantumFunctionError, match='All measured observables'): qf(Variable(0.5))
def test_qnode_fails_on_gates_after_measurements(self, qubit_device_2_wires): 'Gates have to precede measurements' @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) ev = qml.expval(qml.PauliZ(1)) qml.RY(0.5, wires=[0]) return ev with pytest.raises(QuantumFunctionError, match='gates must precede'): qf(Variable(0.5))
-8,088,013,940,943,135,000
Gates have to precede measurements
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_gates_after_measurements
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_gates_after_measurements(self, qubit_device_2_wires): @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) ev = qml.expval(qml.PauliZ(1)) qml.RY(0.5, wires=[0]) return ev with pytest.raises(QuantumFunctionError, match='gates must precede'): qf(Variable(0.5))
def test_qnode_fails_on_multiple_measurements_of_same_wire(self, qubit_device_2_wires): 'A wire can only be measured once' @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) qml.CNOT(wires=[0, 1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1)), qml.expval(qml.PauliX(0))) with pytest.raises(QuantumFunctionError, match='can only be measured once'): qf(Variable(0.5))
3,540,677,449,148,389,000
A wire can only be measured once
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_multiple_measurements_of_same_wire
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_multiple_measurements_of_same_wire(self, qubit_device_2_wires): @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) qml.CNOT(wires=[0, 1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1)), qml.expval(qml.PauliX(0))) with pytest.raises(QuantumFunctionError, match='can only be measured once'): qf(Variable(0.5))
def test_qnode_fails_on_qfunc_with_too_many_wires(self, qubit_device_2_wires): 'The device must have sufficient wires for the qfunc' @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) qml.CNOT(wires=[0, 2]) return qml.expval(qml.PauliZ(0)) with pytest.raises(QuantumFunctionError, match='applied to invalid wire'): qf(Variable(0.5))
-9,174,797,300,855,287,000
The device must have sufficient wires for the qfunc
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_qfunc_with_too_many_wires
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_qfunc_with_too_many_wires(self, qubit_device_2_wires): @qml.qnode(qubit_device_2_wires, interface='tf') def qf(x): qml.RX(x, wires=[0]) qml.CNOT(wires=[0, 2]) return qml.expval(qml.PauliZ(0)) with pytest.raises(QuantumFunctionError, match='applied to invalid wire'): qf(Variable(0.5))
def test_qnode_fails_on_combination_of_cv_and_qbit_ops(self, qubit_device_1_wire): 'CV and discrete operations must not be mixed' @qml.qnode(qubit_device_1_wire, interface='tf') def qf(x): qml.RX(x, wires=[0]) qml.Displacement(0.5, 0, wires=[0]) return qml.expval(qml.PauliZ(0)) with pytest.raises(QuantumFunctionError, match='Continuous and discrete'): qf(Variable(0.5))
-4,822,898,876,985,808,000
CV and discrete operations must not be mixed
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_on_combination_of_cv_and_qbit_ops
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_on_combination_of_cv_and_qbit_ops(self, qubit_device_1_wire): @qml.qnode(qubit_device_1_wire, interface='tf') def qf(x): qml.RX(x, wires=[0]) qml.Displacement(0.5, 0, wires=[0]) return qml.expval(qml.PauliZ(0)) with pytest.raises(QuantumFunctionError, match='Continuous and discrete'): qf(Variable(0.5))
def test_qnode_fails_for_cv_ops_on_qubit_device(self, qubit_device_1_wire): 'A qubit device cannot execute CV operations' @qml.qnode(qubit_device_1_wire, interface='tf') def qf(x): qml.Displacement(0.5, 0, wires=[0]) return qml.expval(qml.X(0)) with pytest.raises(DeviceError, match='Gate [a-zA-Z]+ not supported on device'): qf(Variable(0.5))
-2,523,454,052,245,777,400
A qubit device cannot execute CV operations
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_for_cv_ops_on_qubit_device
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_for_cv_ops_on_qubit_device(self, qubit_device_1_wire): @qml.qnode(qubit_device_1_wire, interface='tf') def qf(x): qml.Displacement(0.5, 0, wires=[0]) return qml.expval(qml.X(0)) with pytest.raises(DeviceError, match='Gate [a-zA-Z]+ not supported on device'): qf(Variable(0.5))
def test_qnode_fails_for_cv_observables_on_qubit_device(self, qubit_device_1_wire): 'A qubit device cannot measure CV observables' @qml.qnode(qubit_device_1_wire, interface='tf') def qf(x): return qml.expval(qml.X(0)) with pytest.raises(DeviceError, match='Observable [a-zA-Z]+ not supported on device'): qf(Variable(0.5))
1,553,786,248,707,053,000
A qubit device cannot measure CV observables
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fails_for_cv_observables_on_qubit_device
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fails_for_cv_observables_on_qubit_device(self, qubit_device_1_wire): @qml.qnode(qubit_device_1_wire, interface='tf') def qf(x): return qml.expval(qml.X(0)) with pytest.raises(DeviceError, match='Observable [a-zA-Z]+ not supported on device'): qf(Variable(0.5))
def test_qnode_fanout(self, qubit_device_1_wire, tol): 'Tests that qnodes can compute the correct function when the same parameter is used in multiple gates.' @qml.qnode(qubit_device_1_wire, interface='tf') def circuit(reused_param, other_param): qml.RX(reused_param, wires=[0]) qml.RZ(other_param, wires=[0]) qml.RX(reused_param, wires=[0]) return qml.expval(qml.PauliZ(0)) thetas = tf.linspace(((- 2) * np.pi), (2 * np.pi), 7) for reused_param in thetas: for theta in thetas: other_param = ((theta ** 2) / 11) y_eval = circuit(reused_param, other_param) Rx = Rotx(reused_param.numpy()) Rz = Rotz(other_param.numpy()) zero_state = np.array([1.0, 0.0]) final_state = (((Rx @ Rz) @ Rx) @ zero_state) y_true = expZ(final_state) assert np.allclose(y_eval, y_true, atol=tol, rtol=0)
-814,788,456,880,821,200
Tests that qnodes can compute the correct function when the same parameter is used in multiple gates.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_fanout
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_fanout(self, qubit_device_1_wire, tol): @qml.qnode(qubit_device_1_wire, interface='tf') def circuit(reused_param, other_param): qml.RX(reused_param, wires=[0]) qml.RZ(other_param, wires=[0]) qml.RX(reused_param, wires=[0]) return qml.expval(qml.PauliZ(0)) thetas = tf.linspace(((- 2) * np.pi), (2 * np.pi), 7) for reused_param in thetas: for theta in thetas: other_param = ((theta ** 2) / 11) y_eval = circuit(reused_param, other_param) Rx = Rotx(reused_param.numpy()) Rz = Rotz(other_param.numpy()) zero_state = np.array([1.0, 0.0]) final_state = (((Rx @ Rz) @ Rx) @ zero_state) y_true = expZ(final_state) assert np.allclose(y_eval, y_true, atol=tol, rtol=0)
def test_qnode_array_parameters_scalar_return(self, qubit_device_1_wire, tol): 'Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow.\n Test case for a circuit that returns a scalar.' @qml.qnode(qubit_device_1_wire, interface='tf') def circuit(dummy1, array, dummy2): qml.RY((0.5 * array[(0, 1)]), wires=0) qml.RY(((- 0.5) * array[(1, 1)]), wires=0) return qml.expval(qml.PauliX(0)) grad_target = (np.array(1.0), np.array([[0.5, 0.43879, 0], [0, (- 0.43879), 0]]), np.array((- 0.4))) cost_target = 1.03257 args = (Variable(0.46), Variable([[2.0, 3.0, 0.3], [7.0, 4.0, 2.1]]), Variable((- 0.13))) def cost(x, array, y): c = tf.cast(circuit(tf.constant(0.111), array, tf.constant(4.5)), tf.float32) return (((c + (0.5 * array[(0, 0)])) + x) - (0.4 * y)) with tf.GradientTape() as tape: cost_res = cost(*args) grad_res = np.array([i.numpy() for i in tape.gradient(cost_res, [args[0], args[2]])]) assert np.allclose(cost_res.numpy(), cost_target, atol=tol, rtol=0) assert np.allclose(grad_res, np.fromiter(grad_target[::2], dtype=np.float32), atol=tol, rtol=0)
6,721,836,414,371,300,000
Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow. Test case for a circuit that returns a scalar.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_array_parameters_scalar_return
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_array_parameters_scalar_return(self, qubit_device_1_wire, tol): 'Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow.\n Test case for a circuit that returns a scalar.' @qml.qnode(qubit_device_1_wire, interface='tf') def circuit(dummy1, array, dummy2): qml.RY((0.5 * array[(0, 1)]), wires=0) qml.RY(((- 0.5) * array[(1, 1)]), wires=0) return qml.expval(qml.PauliX(0)) grad_target = (np.array(1.0), np.array([[0.5, 0.43879, 0], [0, (- 0.43879), 0]]), np.array((- 0.4))) cost_target = 1.03257 args = (Variable(0.46), Variable([[2.0, 3.0, 0.3], [7.0, 4.0, 2.1]]), Variable((- 0.13))) def cost(x, array, y): c = tf.cast(circuit(tf.constant(0.111), array, tf.constant(4.5)), tf.float32) return (((c + (0.5 * array[(0, 0)])) + x) - (0.4 * y)) with tf.GradientTape() as tape: cost_res = cost(*args) grad_res = np.array([i.numpy() for i in tape.gradient(cost_res, [args[0], args[2]])]) assert np.allclose(cost_res.numpy(), cost_target, atol=tol, rtol=0) assert np.allclose(grad_res, np.fromiter(grad_target[::2], dtype=np.float32), atol=tol, rtol=0)
def test_qnode_array_parameters_1_vector_return(self, qubit_device_1_wire, tol): 'Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow\n Test case for a circuit that returns a 1-vector.' @qml.qnode(qubit_device_1_wire, interface='tf') def circuit(dummy1, array, dummy2): qml.RY((0.5 * array[(0, 1)]), wires=0) qml.RY(((- 0.5) * array[(1, 1)]), wires=0) return (qml.expval(qml.PauliX(0)),) grad_target = (np.array(1.0), np.array([[0.5, 0.43879, 0], [0, (- 0.43879), 0]]), np.array((- 0.4))) cost_target = 1.03257 args = (Variable(0.46), Variable([[2.0, 3.0, 0.3], [7.0, 4.0, 2.1]]), Variable((- 0.13))) def cost(x, array, y): c = tf.cast(circuit(tf.constant(0.111), array, tf.constant(4.5)), tf.float32) c = c[0] return (((c + (0.5 * array[(0, 0)])) + x) - (0.4 * y)) with tf.GradientTape() as tape: cost_res = cost(*args) grad_res = np.array([i.numpy() for i in tape.gradient(cost_res, [args[0], args[2]])]) assert np.allclose(cost_res.numpy(), cost_target, atol=tol, rtol=0) assert np.allclose(grad_res, np.fromiter(grad_target[::2], dtype=np.float32), atol=tol, rtol=0)
1,966,179,864,155,857,400
Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow Test case for a circuit that returns a 1-vector.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_array_parameters_1_vector_return
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_array_parameters_1_vector_return(self, qubit_device_1_wire, tol): 'Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow\n Test case for a circuit that returns a 1-vector.' @qml.qnode(qubit_device_1_wire, interface='tf') def circuit(dummy1, array, dummy2): qml.RY((0.5 * array[(0, 1)]), wires=0) qml.RY(((- 0.5) * array[(1, 1)]), wires=0) return (qml.expval(qml.PauliX(0)),) grad_target = (np.array(1.0), np.array([[0.5, 0.43879, 0], [0, (- 0.43879), 0]]), np.array((- 0.4))) cost_target = 1.03257 args = (Variable(0.46), Variable([[2.0, 3.0, 0.3], [7.0, 4.0, 2.1]]), Variable((- 0.13))) def cost(x, array, y): c = tf.cast(circuit(tf.constant(0.111), array, tf.constant(4.5)), tf.float32) c = c[0] return (((c + (0.5 * array[(0, 0)])) + x) - (0.4 * y)) with tf.GradientTape() as tape: cost_res = cost(*args) grad_res = np.array([i.numpy() for i in tape.gradient(cost_res, [args[0], args[2]])]) assert np.allclose(cost_res.numpy(), cost_target, atol=tol, rtol=0) assert np.allclose(grad_res, np.fromiter(grad_target[::2], dtype=np.float32), atol=tol, rtol=0)
def test_qnode_array_parameters_2_vector_return(self, qubit_device_2_wires, tol): 'Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow\n Test case for a circuit that returns a 2-vector.' @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(dummy1, array, dummy2): qml.RY((0.5 * array[(0, 1)]), wires=0) qml.RY(((- 0.5) * array[(1, 1)]), wires=0) qml.RY(array[(1, 0)], wires=1) return (qml.expval(qml.PauliX(0)), qml.expval(qml.PauliX(1))) grad_target = (np.array(1.0), np.array([[0.5, 0.43879, 0], [0, (- 0.43879), 0]]), np.array((- 0.4))) cost_target = 1.03257 args = (Variable(0.46), Variable([[2.0, 3.0, 0.3], [7.0, 4.0, 2.1]]), Variable((- 0.13))) def cost(x, array, y): c = tf.cast(circuit(tf.constant(0.111), array, tf.constant(4.5)), tf.float32) c = c[0] return (((c + (0.5 * array[(0, 0)])) + x) - (0.4 * y)) with tf.GradientTape() as tape: cost_res = cost(*args) grad_res = np.array([i.numpy() for i in tape.gradient(cost_res, [args[0], args[2]])]) assert np.allclose(cost_res.numpy(), cost_target, atol=tol, rtol=0) assert np.allclose(grad_res, np.fromiter(grad_target[::2], dtype=np.float32), atol=tol, rtol=0)
1,552,991,200,751,960,000
Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow Test case for a circuit that returns a 2-vector.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_qnode_array_parameters_2_vector_return
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_qnode_array_parameters_2_vector_return(self, qubit_device_2_wires, tol): 'Test that QNode can take arrays as input arguments, and that they interact properly with TensorFlow\n Test case for a circuit that returns a 2-vector.' @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(dummy1, array, dummy2): qml.RY((0.5 * array[(0, 1)]), wires=0) qml.RY(((- 0.5) * array[(1, 1)]), wires=0) qml.RY(array[(1, 0)], wires=1) return (qml.expval(qml.PauliX(0)), qml.expval(qml.PauliX(1))) grad_target = (np.array(1.0), np.array([[0.5, 0.43879, 0], [0, (- 0.43879), 0]]), np.array((- 0.4))) cost_target = 1.03257 args = (Variable(0.46), Variable([[2.0, 3.0, 0.3], [7.0, 4.0, 2.1]]), Variable((- 0.13))) def cost(x, array, y): c = tf.cast(circuit(tf.constant(0.111), array, tf.constant(4.5)), tf.float32) c = c[0] return (((c + (0.5 * array[(0, 0)])) + x) - (0.4 * y)) with tf.GradientTape() as tape: cost_res = cost(*args) grad_res = np.array([i.numpy() for i in tape.gradient(cost_res, [args[0], args[2]])]) assert np.allclose(cost_res.numpy(), cost_target, atol=tol, rtol=0) assert np.allclose(grad_res, np.fromiter(grad_target[::2], dtype=np.float32), atol=tol, rtol=0)
def test_array_parameters_evaluate(self, qubit_device_2_wires, tol): 'Test that array parameters gives same result as positional arguments.' (a, b, c) = (tf.constant(0.5), tf.constant(0.54), tf.constant(0.3)) def ansatz(x, y, z): qml.QubitStateVector((np.array([1, 0, 1, 1]) / np.sqrt(3)), wires=[0, 1]) qml.Rot(x, y, z, wires=0) qml.CNOT(wires=[0, 1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliY(1))) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit1(x, y, z): return ansatz(x, y, z) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit2(x, array): return ansatz(x, array[0], array[1]) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit3(array): return ansatz(*array) positional_res = circuit1(a, b, c) array_res1 = circuit2(a, Variable([b, c])) array_res2 = circuit3(Variable([a, b, c])) assert np.allclose(positional_res.numpy(), array_res1.numpy(), atol=tol, rtol=0) assert np.allclose(positional_res.numpy(), array_res2.numpy(), atol=tol, rtol=0)
-4,144,648,632,722,298,400
Test that array parameters gives same result as positional arguments.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_array_parameters_evaluate
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_array_parameters_evaluate(self, qubit_device_2_wires, tol): (a, b, c) = (tf.constant(0.5), tf.constant(0.54), tf.constant(0.3)) def ansatz(x, y, z): qml.QubitStateVector((np.array([1, 0, 1, 1]) / np.sqrt(3)), wires=[0, 1]) qml.Rot(x, y, z, wires=0) qml.CNOT(wires=[0, 1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliY(1))) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit1(x, y, z): return ansatz(x, y, z) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit2(x, array): return ansatz(x, array[0], array[1]) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit3(array): return ansatz(*array) positional_res = circuit1(a, b, c) array_res1 = circuit2(a, Variable([b, c])) array_res2 = circuit3(Variable([a, b, c])) assert np.allclose(positional_res.numpy(), array_res1.numpy(), atol=tol, rtol=0) assert np.allclose(positional_res.numpy(), array_res2.numpy(), atol=tol, rtol=0)
def test_multiple_expectation_different_wires(self, qubit_device_2_wires, tol): 'Tests that qnodes return multiple expectation values.' (a, b, c) = (Variable(0.5), Variable(0.54), Variable(0.3)) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(x, y, z): qml.RX(x, wires=[0]) qml.RZ(y, wires=[0]) qml.CNOT(wires=[0, 1]) qml.RY(y, wires=[0]) qml.RX(z, wires=[0]) return (qml.expval(qml.PauliY(0)), qml.expval(qml.PauliZ(1))) res = circuit(a, b, c) out_state = (((((np.kron(Rotx(c.numpy()), I) @ np.kron(Roty(b.numpy()), I)) @ CNOT) @ np.kron(Rotz(b.numpy()), I)) @ np.kron(Rotx(a.numpy()), I)) @ np.array([1, 0, 0, 0])) ex0 = np.vdot(out_state, (np.kron(Y, I) @ out_state)) ex1 = np.vdot(out_state, (np.kron(I, Z) @ out_state)) ex = np.array([ex0, ex1]) assert np.allclose(ex, res.numpy(), atol=tol, rtol=0)
7,911,533,964,854,274,000
Tests that qnodes return multiple expectation values.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_multiple_expectation_different_wires
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_multiple_expectation_different_wires(self, qubit_device_2_wires, tol): (a, b, c) = (Variable(0.5), Variable(0.54), Variable(0.3)) @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(x, y, z): qml.RX(x, wires=[0]) qml.RZ(y, wires=[0]) qml.CNOT(wires=[0, 1]) qml.RY(y, wires=[0]) qml.RX(z, wires=[0]) return (qml.expval(qml.PauliY(0)), qml.expval(qml.PauliZ(1))) res = circuit(a, b, c) out_state = (((((np.kron(Rotx(c.numpy()), I) @ np.kron(Roty(b.numpy()), I)) @ CNOT) @ np.kron(Rotz(b.numpy()), I)) @ np.kron(Rotx(a.numpy()), I)) @ np.array([1, 0, 0, 0])) ex0 = np.vdot(out_state, (np.kron(Y, I) @ out_state)) ex1 = np.vdot(out_state, (np.kron(I, Z) @ out_state)) ex = np.array([ex0, ex1]) assert np.allclose(ex, res.numpy(), atol=tol, rtol=0)
def test_multiple_keywordargs_used(self, qubit_device_2_wires, tol): 'Tests that qnodes use multiple keyword arguments.' @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(w, x=None, y=None): qml.RX(x, wires=[0]) qml.RX(y, wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) c = circuit(tf.constant(1.0), x=np.pi, y=np.pi) assert np.allclose(c.numpy(), [(- 1.0), (- 1.0)], atol=tol, rtol=0)
-2,299,123,470,347,842,000
Tests that qnodes use multiple keyword arguments.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_multiple_keywordargs_used
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_multiple_keywordargs_used(self, qubit_device_2_wires, tol): @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(w, x=None, y=None): qml.RX(x, wires=[0]) qml.RX(y, wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) c = circuit(tf.constant(1.0), x=np.pi, y=np.pi) assert np.allclose(c.numpy(), [(- 1.0), (- 1.0)], atol=tol, rtol=0)
def test_multidimensional_keywordargs_used(self, qubit_device_2_wires, tol): 'Tests that qnodes use multi-dimensional keyword arguments.' def circuit(w, x=None): qml.RX(x[0], wires=[0]) qml.RX(x[1], wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) circuit = qml.QNode(circuit, qubit_device_2_wires).to_tf() c = circuit(tf.constant(1.0), x=[np.pi, np.pi]) assert np.allclose(c.numpy(), [(- 1.0), (- 1.0)], atol=tol, rtol=0)
-4,651,934,759,475,773,000
Tests that qnodes use multi-dimensional keyword arguments.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_multidimensional_keywordargs_used
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_multidimensional_keywordargs_used(self, qubit_device_2_wires, tol): def circuit(w, x=None): qml.RX(x[0], wires=[0]) qml.RX(x[1], wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) circuit = qml.QNode(circuit, qubit_device_2_wires).to_tf() c = circuit(tf.constant(1.0), x=[np.pi, np.pi]) assert np.allclose(c.numpy(), [(- 1.0), (- 1.0)], atol=tol, rtol=0)
def test_keywordargs_for_wires(self, qubit_device_2_wires, tol): 'Tests that wires can be passed as keyword arguments.' default_q = 0 def circuit(x, q=default_q): qml.RY(x, wires=0) return qml.expval(qml.PauliZ(q)) circuit = qml.QNode(circuit, qubit_device_2_wires).to_tf() c = circuit(tf.constant(np.pi), q=1) assert np.allclose(c, 1.0, atol=tol, rtol=0) c = circuit(tf.constant(np.pi)) assert np.allclose(c.numpy(), (- 1.0), atol=tol, rtol=0)
8,595,178,969,222,835,000
Tests that wires can be passed as keyword arguments.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_keywordargs_for_wires
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_keywordargs_for_wires(self, qubit_device_2_wires, tol): default_q = 0 def circuit(x, q=default_q): qml.RY(x, wires=0) return qml.expval(qml.PauliZ(q)) circuit = qml.QNode(circuit, qubit_device_2_wires).to_tf() c = circuit(tf.constant(np.pi), q=1) assert np.allclose(c, 1.0, atol=tol, rtol=0) c = circuit(tf.constant(np.pi)) assert np.allclose(c.numpy(), (- 1.0), atol=tol, rtol=0)
def test_keywordargs_used(self, qubit_device_1_wire, tol): 'Tests that qnodes use keyword arguments.' def circuit(w, x=None): qml.RX(x, wires=[0]) return qml.expval(qml.PauliZ(0)) circuit = qml.QNode(circuit, qubit_device_1_wire).to_tf() c = circuit(tf.constant(1.0), x=np.pi) assert np.allclose(c.numpy(), (- 1.0), atol=tol, rtol=0)
6,138,958,088,680,453,000
Tests that qnodes use keyword arguments.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_keywordargs_used
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_keywordargs_used(self, qubit_device_1_wire, tol): def circuit(w, x=None): qml.RX(x, wires=[0]) return qml.expval(qml.PauliZ(0)) circuit = qml.QNode(circuit, qubit_device_1_wire).to_tf() c = circuit(tf.constant(1.0), x=np.pi) assert np.allclose(c.numpy(), (- 1.0), atol=tol, rtol=0)
def test_mixture_numpy_tensors(self, qubit_device_2_wires, tol): 'Tests that qnodes work with python types and tensors.' @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(w, x, y): qml.RX(x, wires=[0]) qml.RX(y, wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) c = circuit(tf.constant(1.0), np.pi, np.pi).numpy() assert np.allclose(c, [(- 1.0), (- 1.0)], atol=tol, rtol=0)
6,131,238,922,305,531,000
Tests that qnodes work with python types and tensors.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_mixture_numpy_tensors
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_mixture_numpy_tensors(self, qubit_device_2_wires, tol): @qml.qnode(qubit_device_2_wires, interface='tf') def circuit(w, x, y): qml.RX(x, wires=[0]) qml.RX(y, wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) c = circuit(tf.constant(1.0), np.pi, np.pi).numpy() assert np.allclose(c, [(- 1.0), (- 1.0)], atol=tol, rtol=0)
def test_keywordarg_updated_in_multiple_calls(self, qubit_device_2_wires): 'Tests that qnodes update keyword arguments in consecutive calls.' def circuit(w, x=None): qml.RX(w, wires=[0]) qml.RX(x, wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) circuit = qml.QNode(circuit, qubit_device_2_wires).to_tf() c1 = circuit(tf.constant(0.1), x=tf.constant(0.0)) c2 = circuit(tf.constant(0.1), x=np.pi) assert (c1[1] != c2[1])
195,756,045,234,336,300
Tests that qnodes update keyword arguments in consecutive calls.
artifacts/old_dataset_versions/minimal_commits/pennylane/pennylane#385/after/test_tf.py
test_keywordarg_updated_in_multiple_calls
MattePalte/Bugs-Quantum-Computing-Platforms
python
def test_keywordarg_updated_in_multiple_calls(self, qubit_device_2_wires): def circuit(w, x=None): qml.RX(w, wires=[0]) qml.RX(x, wires=[1]) return (qml.expval(qml.PauliZ(0)), qml.expval(qml.PauliZ(1))) circuit = qml.QNode(circuit, qubit_device_2_wires).to_tf() c1 = circuit(tf.constant(0.1), x=tf.constant(0.0)) c2 = circuit(tf.constant(0.1), x=np.pi) assert (c1[1] != c2[1])