Dataset columns (value ranges as reported by the viewer):

  body                      string, length 26 to 98.2k
  body_hash                 int64, -9,222,864,604,528,158,000 to 9,221,803,474B
  docstring                 string, length 1 to 16.8k
  path                      string, length 5 to 230
  name                      string, length 1 to 96
  repository_name           string, length 7 to 89
  lang                      string, 1 distinct value
  body_without_docstring    string, length 20 to 98.2k

Each row below pairs a function body from kubernetes/client/apis/batch_v1_api.py with its hash and source metadata.

body:

def delete_namespaced_job(self, name, namespace, **kwargs):
    """
    delete a Job
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_namespaced_job(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Job (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param V1DeleteOptions body:
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the "orphan" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
    :return: V1Status
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_namespaced_job_with_http_info(name, namespace, **kwargs)
    else:
        data = self.delete_namespaced_job_with_http_info(name, namespace, **kwargs)
        return data

body_hash: -2,694,559,309,886,043,000
path: kubernetes/client/apis/batch_v1_api.py
name: delete_namespaced_job
repository_name: MiaoRachelYu/python
lang: python
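
For orientation, a minimal usage sketch of the delete_namespaced_job wrapper above, assuming the standard kubernetes Python package layout and a reachable cluster; the Job name my-job and the namespace default are placeholders, not values taken from this dataset.

from kubernetes import client, config

config.load_kube_config()          # assumes a local kubeconfig; use load_incluster_config() inside a pod
api = client.BatchV1Api()

# Delete the Job and let the garbage collector remove its Pods in the background.
status = api.delete_namespaced_job(
    name='my-job',                 # placeholder Job name
    namespace='default',           # placeholder namespace
    body=client.V1DeleteOptions(propagation_policy='Background'),
)
print(status)                      # V1Status returned by the API server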

body:

def delete_namespaced_job_with_http_info(self, name, namespace, **kwargs):
    """
    delete a Job
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.delete_namespaced_job_with_http_info(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Job (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param V1DeleteOptions body:
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
    :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the "orphan" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
    :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
    :return: V1Status
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['name', 'namespace', 'pretty', 'body', 'dry_run', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError("Got an unexpected keyword argument '%s' to method delete_namespaced_job" % key)
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError('Missing the required parameter `name` when calling `delete_namespaced_job`')
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError('Missing the required parameter `namespace` when calling `delete_namespaced_job`')

    collection_formats = {}

    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    if 'dry_run' in params:
        query_params.append(('dryRun', params['dry_run']))
    if 'grace_period_seconds' in params:
        query_params.append(('gracePeriodSeconds', params['grace_period_seconds']))
    if 'orphan_dependents' in params:
        query_params.append(('orphanDependents', params['orphan_dependents']))
    if 'propagation_policy' in params:
        query_params.append(('propagationPolicy', params['propagation_policy']))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1Status',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)

body_hash: -8,098,088,110,390,845,000
path: kubernetes/client/apis/batch_v1_api.py
name: delete_namespaced_job_with_http_info
repository_name: MiaoRachelYu/python
lang: python
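
A sketch of the asynchronous calling convention described in the docstring, again assuming the standard kubernetes package and placeholder names; in this client generation the *_with_http_info variant resolves to a (data, HTTP status, headers) tuple rather than the bare V1Status.

from kubernetes import client, config

config.load_kube_config()
api = client.BatchV1Api()

# Fire the DELETE without blocking; the call returns a thread-like handle immediately.
thread = api.delete_namespaced_job_with_http_info(
    'my-job', 'default',                 # placeholder name and namespace
    propagation_policy='Foreground',     # cascade to dependents before the Job itself is removed
    async_req=True,
)

# ... do other work here ...

data, http_status, headers = thread.get()   # blocks until the request completes
print(http_status, data)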

body:

def get_api_resources(self, **kwargs):
    """
    get available resources
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_api_resources(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: V1APIResourceList
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_api_resources_with_http_info(**kwargs)
    else:
        data = self.get_api_resources_with_http_info(**kwargs)
        return data

body_hash: -340,951,253,249,199,000
path: kubernetes/client/apis/batch_v1_api.py
name: get_api_resources
repository_name: MiaoRachelYu/python
lang: python
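
A short sketch of what the returned V1APIResourceList looks like in practice, assuming the standard kubernetes package; batch/v1 typically serves jobs and jobs/status.

from kubernetes import client, config

config.load_kube_config()
api = client.BatchV1Api()

# Discover which resources the batch/v1 API group serves.
resource_list = api.get_api_resources()
for resource in resource_list.resources:
    print(resource.name, resource.kind, resource.namespaced)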

body:

def get_api_resources_with_http_info(self, **kwargs):
    """
    get available resources
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_api_resources_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: V1APIResourceList
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = []
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError("Got an unexpected keyword argument '%s' to method get_api_resources" % key)
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api('/apis/batch/v1/', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1APIResourceList',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)

body_hash: -5,848,708,895,557,926,000
path: kubernetes/client/apis/batch_v1_api.py
name: get_api_resources_with_http_info
repository_name: MiaoRachelYu/python
lang: python
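
A sketch of the synchronous *_with_http_info form, which in this client generation exposes the raw HTTP metadata alongside the deserialized object; the exact header container type varies by client version, so treat the .get call below as illustrative. Assumes the standard kubernetes package.

from kubernetes import client, config

config.load_kube_config()
api = client.BatchV1Api()

# The *_with_http_info variants return (data, status_code, headers) when not called asynchronously.
resources, status_code, headers = api.get_api_resources_with_http_info()
print(status_code)                          # e.g. 200
print(headers.get('Content-Type'))          # typically application/json
print([r.name for r in resources.resources])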

body:

def list_job_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind Job
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_job_for_all_namespaces(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1JobList
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.list_job_for_all_namespaces_with_http_info(**kwargs)
    else:
        data = self.list_job_for_all_namespaces_with_http_info(**kwargs)
        return data

body_hash: -4,896,911,224,072,507,000
path: kubernetes/client/apis/batch_v1_api.py
name: list_job_for_all_namespaces
repository_name: MiaoRachelYu/python
lang: python
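
A sketch of the chunked-list pattern that the limit and _continue parameters describe, assuming the standard kubernetes package; the label selector app=batch-worker and the page size of 50 are placeholders.

from kubernetes import client, config

config.load_kube_config()
api = client.BatchV1Api()

# Page through Jobs in all namespaces until the server stops returning a continue token.
continue_token = None
while True:
    kwargs = {'label_selector': 'app=batch-worker', 'limit': 50}
    if continue_token:
        kwargs['_continue'] = continue_token
    job_list = api.list_job_for_all_namespaces(**kwargs)
    for job in job_list.items:
        print(job.metadata.namespace, job.metadata.name, job.status.succeeded)
    continue_token = job_list.metadata._continue   # Python attribute for the 'continue' list-metadata field in this client
    if not continue_token:
        break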

body:

def list_job_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind Job
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_job_for_all_namespaces_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1JobList
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['_continue', 'field_selector', 'include_uninitialized', 'label_selector', 'limit', 'pretty', 'resource_version', 'timeout_seconds', 'watch']
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError("Got an unexpected keyword argument '%s' to method list_job_for_all_namespaces" % key)
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []
    if '_continue' in params:
        query_params.append(('continue', params['_continue']))
    if 'field_selector' in params:
        query_params.append(('fieldSelector', params['field_selector']))
    if 'include_uninitialized' in params:
        query_params.append(('includeUninitialized', params['include_uninitialized']))
    if 'label_selector' in params:
        query_params.append(('labelSelector', params['label_selector']))
    if 'limit' in params:
        query_params.append(('limit', params['limit']))
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    if 'resource_version' in params:
        query_params.append(('resourceVersion', params['resource_version']))
    if 'timeout_seconds' in params:
        query_params.append(('timeoutSeconds', params['timeout_seconds']))
    if 'watch' in params:
        query_params.append(('watch', params['watch']))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    return self.api_client.call_api('/apis/batch/v1/jobs', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1JobList',
                                    auth_settings=auth_settings,
                                    async_req=params.get('async_req'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)

body_hash: -7,553,818,778,255,635,000
path: kubernetes/client/apis/batch_v1_api.py
name: list_job_for_all_namespaces_with_http_info
repository_name: MiaoRachelYu/python
lang: python
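
A sketch of the watch=true usage hinted at by the Accept types ending in ;stream=watch, using the client's Watch helper; assumes the standard kubernetes package, and the 60-second timeout is an arbitrary placeholder.

from kubernetes import client, config, watch

config.load_kube_config()
api = client.BatchV1Api()

# Stream ADDED/MODIFIED/DELETED events for Jobs across all namespaces.
w = watch.Watch()
for event in w.stream(api.list_job_for_all_namespaces, timeout_seconds=60):
    job = event['object']
    print(event['type'], job.metadata.namespace, job.metadata.name)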

body:

def list_namespaced_job(self, namespace, **kwargs):
    """
    list or watch objects of kind Job
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.list_namespaced_job(namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1JobList
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.list_namespaced_job_with_http_info(namespace, **kwargs)
    else:
        data = self.list_namespaced_job_with_http_info(namespace, **kwargs)
        return data
-8,123,203,867,841,701,000
list or watch objects of kind Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_job(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. 
Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1JobList If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
list_namespaced_job
MiaoRachelYu/python
python
def list_namespaced_job(self, namespace, **kwargs): '\n list or watch objects of kind Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.list_namespaced_job(namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param bool include_uninitialized: If true, partially initialized resources are included in the response.\n :param str pretty: If \'true\', then the output is pretty printed.\n :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.\n :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.\n :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.\n :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.\n :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it\'s 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.\n :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.\n :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.\n :return: V1JobList\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_namespaced_job_with_http_info(namespace, **kwargs) else: data = self.list_namespaced_job_with_http_info(namespace, **kwargs) return data
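For the namespaced variant above, a short sketch of both the synchronous call and the async_req pattern that the docstring itself demonstrates. The "default" namespace and the label selector are placeholders, and cluster access through config.load_kube_config() is assumed.

from kubernetes import client, config

config.load_kube_config()
batch = client.BatchV1Api()

# Synchronous call: returns a V1JobList directly.
jobs = batch.list_namespaced_job("default", label_selector="app=batch-worker")
for job in jobs.items:
    print(job.metadata.name, job.status.active, job.status.succeeded)

# Asynchronous call, as shown in the docstring: returns a thread-like object.
thread = batch.list_namespaced_job("default", async_req=True)
jobs = thread.get()                  # blocks until the request completes
print(len(jobs.items), "jobs in default")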
def list_namespaced_job_with_http_info(self, namespace, **kwargs): '\n list or watch objects of kind Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.list_namespaced_job_with_http_info(namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param bool include_uninitialized: If true, partially initialized resources are included in the response.\n :param str pretty: If \'true\', then the output is pretty printed.\n :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.\n :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.\n :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.\n :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.\n :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it\'s 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.\n :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.\n :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.\n :return: V1JobList\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method list_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `list_namespaced_job`') collection_formats = {} path_params = {} if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('include_uninitialized' in params): query_params.append(('includeUninitialized', params['include_uninitialized'])) if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('_continue' in params): query_params.append(('continue', params['_continue'])) if ('field_selector' in params): query_params.append(('fieldSelector', params['field_selector'])) if ('label_selector' in params): query_params.append(('labelSelector', params['label_selector'])) if ('limit' in params): query_params.append(('limit', params['limit'])) if ('resource_version' in params): query_params.append(('resourceVersion', params['resource_version'])) if ('timeout_seconds' in params): query_params.append(('timeoutSeconds', params['timeout_seconds'])) if ('watch' in params): query_params.append(('watch', params['watch'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1JobList', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), 
collection_formats=collection_formats)
-3,838,151,148,503,006,700
list or watch objects of kind Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_job_with_http_info(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. 
Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1JobList If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
list_namespaced_job_with_http_info
MiaoRachelYu/python
python
def list_namespaced_job_with_http_info(self, namespace, **kwargs): '\n list or watch objects of kind Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.list_namespaced_job_with_http_info(namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param bool include_uninitialized: If true, partially initialized resources are included in the response.\n :param str pretty: If \'true\', then the output is pretty printed.\n :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.\n :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.\n :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.\n :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. 
If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.\n :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it\'s 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.\n :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.\n :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.\n :return: V1JobList\n If the method is called asynchronously,\n returns the request thread.\n ' all_params = ['namespace', 'include_uninitialized', 'pretty', '_continue', 'field_selector', 'label_selector', 'limit', 'resource_version', 'timeout_seconds', 'watch'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method list_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `list_namespaced_job`') collection_formats = {} path_params = {} if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('include_uninitialized' in params): query_params.append(('includeUninitialized', params['include_uninitialized'])) if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('_continue' in params): query_params.append(('continue', params['_continue'])) if ('field_selector' in params): query_params.append(('fieldSelector', params['field_selector'])) if ('label_selector' in params): query_params.append(('labelSelector', params['label_selector'])) if ('limit' in params): query_params.append(('limit', params['limit'])) if ('resource_version' in params): query_params.append(('resourceVersion', params['resource_version'])) if ('timeout_seconds' in params): query_params.append(('timeoutSeconds', params['timeout_seconds'])) if ('watch' in params): query_params.append(('watch', params['watch'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1JobList', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), 
collection_formats=collection_formats)
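The watch parameter documented in this row streams add/update/delete notifications instead of returning a one-shot list. The sketch below drives it through the client library's watch.Watch helper, which wraps list_namespaced_job for exactly this purpose; the helper, the 60-second timeout, and the namespace are assumptions layered on top of the row rather than part of it.

from kubernetes import client, config, watch

config.load_kube_config()
batch = client.BatchV1Api()

w = watch.Watch()
# Each event is a dict with a 'type' (ADDED/MODIFIED/DELETED) and the Job 'object'.
for event in w.stream(batch.list_namespaced_job, namespace="default", timeout_seconds=60):
    job = event["object"]
    print(event["type"], job.metadata.name)
    if event["type"] == "DELETED":
        w.stop()                     # illustrative: stop streaming once a job disappears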
def patch_namespaced_job(self, name, namespace, body, **kwargs): "\n partially update the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_job_with_http_info(name, namespace, body, **kwargs) else: data = self.patch_namespaced_job_with_http_info(name, namespace, body, **kwargs) return data
675,484,736,674,713,500
partially update the specified Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_job(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Job (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Job If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
patch_namespaced_job
MiaoRachelYu/python
python
def patch_namespaced_job(self, name, namespace, body, **kwargs): "\n partially update the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_job_with_http_info(name, namespace, body, **kwargs) else: data = self.patch_namespaced_job_with_http_info(name, namespace, body, **kwargs) return data
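patch_namespaced_job accepts a plain object body together with one of the three patch content types listed in the generated code. The sketch below sends a dict body that bumps spec.parallelism; treating a dict as a merge/strategic-merge patch is the common behavior of this client family, but the exact content-type selection varies by client version, so it is stated here as an assumption. The Job name, namespace, and parallelism value are placeholders.

from kubernetes import client, config

config.load_kube_config()
batch = client.BatchV1Api()

# Hypothetical partial update: only the fields present in the body are changed.
patch_body = {"spec": {"parallelism": 2}}

patched = batch.patch_namespaced_job(
    name="example-job",              # placeholder Job name
    namespace="default",             # placeholder namespace
    body=patch_body,
)
print(patched.metadata.name, patched.spec.parallelism)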
def patch_namespaced_job_with_http_info(self, name, namespace, body, **kwargs): "\n partially update the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method patch_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `patch_namespaced_job`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `patch_namespaced_job`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `patch_namespaced_job`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
-8,757,680,812,355,071,000
partially update the specified Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_job_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Job (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Job If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
patch_namespaced_job_with_http_info
MiaoRachelYu/python
python
def patch_namespaced_job_with_http_info(self, name, namespace, body, **kwargs): "\n partially update the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method patch_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `patch_namespaced_job`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `patch_namespaced_job`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `patch_namespaced_job`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def patch_namespaced_job_status(self, name, namespace, body, **kwargs): "\n partially update status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job_status(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) else: data = self.patch_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) return data
1,816,886,749,744,125,400
partially update status of the specified Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_job_status(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Job (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Job If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
patch_namespaced_job_status
MiaoRachelYu/python
python
def patch_namespaced_job_status(self, name, namespace, body, **kwargs): "\n partially update status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job_status(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) else: data = self.patch_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) return data
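Status updates such as the one above are normally written by the Job controller, so calling patch_namespaced_job_status directly is unusual and requires RBAC access to the jobs/status subresource; the dry_run="All" value documented in the row lets the server validate such a patch without persisting it. Everything concrete in the sketch (names, the condition being set) is illustrative, and the same caveat about patch content-type selection as in the previous sketch applies.

from kubernetes import client, config

config.load_kube_config()
batch = client.BatchV1Api()

# Illustrative status patch; with dry_run="All" the server validates it but persists nothing.
status_patch = {"status": {"conditions": [{"type": "Failed",
                                           "status": "True",
                                           "reason": "ManualOverride"}]}}

result = batch.patch_namespaced_job_status(
    name="example-job",
    namespace="default",
    body=status_patch,
    dry_run="All",                   # documented value: run all dry-run stages, persist nothing
)
print(result.status.conditions)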
def patch_namespaced_job_status_with_http_info(self, name, namespace, body, **kwargs): "\n partially update status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job_status_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method patch_namespaced_job_status" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `patch_namespaced_job_status`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `patch_namespaced_job_status`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `patch_namespaced_job_status`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}/status', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
6,879,412,285,873,147,000
partially update status of the specified Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_job_status_with_http_info(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Job (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Job If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
patch_namespaced_job_status_with_http_info
MiaoRachelYu/python
python
def patch_namespaced_job_status_with_http_info(self, name, namespace, body, **kwargs): "\n partially update status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_job_status_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param object body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method patch_namespaced_job_status" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `patch_namespaced_job_status`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `patch_namespaced_job_status`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `patch_namespaced_job_status`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}/status', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def read_namespaced_job(self, name, namespace, **kwargs): "\n read the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.\n :param bool export: Should this value be exported. Export strips fields that a user can not specify.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_job_with_http_info(name, namespace, **kwargs) else: data = self.read_namespaced_job_with_http_info(name, namespace, **kwargs) return data
8,608,870,038,711,187,000
read the specified Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_job(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Job (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. :param bool export: Should this value be exported. Export strips fields that a user can not specify. :return: V1Job If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
read_namespaced_job
MiaoRachelYu/python
python
def read_namespaced_job(self, name, namespace, **kwargs): "\n read the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.\n :param bool export: Should this value be exported. Export strips fields that a user can not specify.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_job_with_http_info(name, namespace, **kwargs) else: data = self.read_namespaced_job_with_http_info(name, namespace, **kwargs) return data
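A common use of read_namespaced_job is inspecting how far a Job has progressed. The sketch below reads one Job and prints the standard V1JobStatus counters and conditions; the name and namespace are placeholders, and the exact/export flags documented in the row are left at their defaults.

from kubernetes import client, config

config.load_kube_config()
batch = client.BatchV1Api()

job = batch.read_namespaced_job(name="example-job", namespace="default")

# V1JobStatus exposes pod counters plus a list of conditions (Complete/Failed).
print("active:", job.status.active,
      "succeeded:", job.status.succeeded,
      "failed:", job.status.failed)
for cond in (job.status.conditions or []):
    print(cond.type, cond.status, cond.reason)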
def read_namespaced_job_with_http_info(self, name, namespace, **kwargs): "\n read the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job_with_http_info(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.\n :param bool export: Should this value be exported. Export strips fields that a user can not specify.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'pretty', 'exact', 'export'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method read_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `read_namespaced_job`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `read_namespaced_job`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('exact' in params): query_params.append(('exact', params['exact'])) if ('export' in params): query_params.append(('export', params['export'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
-4,373,121,982,148,294,700
read the specified Job This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_job_with_http_info(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Job (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. :param bool export: Should this value be exported. Export strips fields that a user can not specify. :return: V1Job If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
read_namespaced_job_with_http_info
MiaoRachelYu/python
python
def read_namespaced_job_with_http_info(self, name, namespace, **kwargs): "\n read the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job_with_http_info(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.\n :param bool export: Should this value be exported. Export strips fields that a user can not specify.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'pretty', 'exact', 'export'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method read_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `read_namespaced_job`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `read_namespaced_job`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('exact' in params): query_params.append(('exact', params['exact'])) if ('export' in params): query_params.append(('export', params['export'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
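The `*_with_http_info` variant above is what the thin `read_namespaced_job` wrapper delegates to (the wrapper follows the same pattern as `read_namespaced_job_status` in the next record). A minimal usage sketch, assuming a cluster reachable through a local kubeconfig and a Job named `pi` in the `default` namespace — both placeholder assumptions:

```python
from kubernetes import client, config

# Load credentials from ~/.kube/config; inside a pod you would use
# config.load_incluster_config() instead.
config.load_kube_config()

batch = client.BatchV1Api()

# The plain wrapper returns a V1Job; the *_with_http_info variant returns
# (V1Job, HTTP status code, response headers) instead.
job = batch.read_namespaced_job(name="pi", namespace="default")
print(job.metadata.name, job.spec.completions)
```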
def read_namespaced_job_status(self, name, namespace, **kwargs): "\n read status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job_status(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_job_status_with_http_info(name, namespace, **kwargs) else: data = self.read_namespaced_job_status_with_http_info(name, namespace, **kwargs) return data
-4,481,424,527,974,825,500
read status of the specified Job
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_job_status(name, namespace, async_req=True)
>>> result = thread.get()

:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Job
If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
read_namespaced_job_status
MiaoRachelYu/python
python
def read_namespaced_job_status(self, name, namespace, **kwargs): "\n read status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job_status(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_job_status_with_http_info(name, namespace, **kwargs) else: data = self.read_namespaced_job_status_with_http_info(name, namespace, **kwargs) return data
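`read_namespaced_job_status` reads the `/status` subresource, which makes it a natural fit for polling until a Job finishes. A hedged sketch with the same placeholder Job name and namespace:

```python
import time
from kubernetes import client, config

config.load_kube_config()
batch = client.BatchV1Api()

# Poll the /status subresource until the Job reports success or failure.
while True:
    status = batch.read_namespaced_job_status(name="pi", namespace="default").status
    if status.succeeded:
        print("Job finished successfully")
        break
    if status.failed:
        print("Job failed")
        break
    time.sleep(5)
```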
def read_namespaced_job_status_with_http_info(self, name, namespace, **kwargs): "\n read status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job_status_with_http_info(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'pretty'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method read_namespaced_job_status" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `read_namespaced_job_status`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `read_namespaced_job_status`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}/status', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
8,040,640,776,958,956,000
read status of the specified Job
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_job_status_with_http_info(name, namespace, async_req=True)
>>> result = thread.get()

:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1Job
If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
read_namespaced_job_status_with_http_info
MiaoRachelYu/python
python
def read_namespaced_job_status_with_http_info(self, name, namespace, **kwargs): "\n read status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_job_status_with_http_info(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'pretty'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method read_namespaced_job_status" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `read_namespaced_job_status`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `read_namespaced_job_status`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}/status', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def replace_namespaced_job(self, name, namespace, body, **kwargs): "\n replace the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_namespaced_job_with_http_info(name, namespace, body, **kwargs) else: data = self.replace_namespaced_job_with_http_info(name, namespace, body, **kwargs) return data
1,828,721,135,564,521,500
replace the specified Job
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_job(name, namespace, body, async_req=True)
>>> result = thread.get()

:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Job body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Job
If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
replace_namespaced_job
MiaoRachelYu/python
python
def replace_namespaced_job(self, name, namespace, body, **kwargs): "\n replace the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_namespaced_job_with_http_info(name, namespace, body, **kwargs) else: data = self.replace_namespaced_job_with_http_info(name, namespace, body, **kwargs) return data
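`replace_namespaced_job` is a full PUT of the object, so the usual pattern is read-modify-replace; the API server rejects the write if the `resourceVersion` carried in the body is stale. A sketch under the same placeholder names, bumping `spec.parallelism`, which is one of the few Job spec fields that can be changed after creation (most of the spec is immutable):

```python
from kubernetes import client, config

config.load_kube_config()
batch = client.BatchV1Api()

# Read the current object so the replace carries a valid resourceVersion.
job = batch.read_namespaced_job(name="pi", namespace="default")

job.spec.parallelism = 2  # scale the number of concurrently running pods

updated = batch.replace_namespaced_job(name="pi", namespace="default", body=job)
print(updated.metadata.resource_version)
```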
def replace_namespaced_job_with_http_info(self, name, namespace, body, **kwargs): "\n replace the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method replace_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `replace_namespaced_job`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `replace_namespaced_job`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `replace_namespaced_job`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
-3,535,187,599,020,698,000
replace the specified Job
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_job_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()

:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Job body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Job
If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
replace_namespaced_job_with_http_info
MiaoRachelYu/python
python
def replace_namespaced_job_with_http_info(self, name, namespace, body, **kwargs): "\n replace the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method replace_namespaced_job" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `replace_namespaced_job`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `replace_namespaced_job`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `replace_namespaced_job`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def replace_namespaced_job_status(self, name, namespace, body, **kwargs): "\n replace status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job_status(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) else: data = self.replace_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) return data
901,752,369,881,230,200
replace status of the specified Job
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_job_status(name, namespace, body, async_req=True)
>>> result = thread.get()

:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Job body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Job
If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
replace_namespaced_job_status
MiaoRachelYu/python
python
def replace_namespaced_job_status(self, name, namespace, body, **kwargs): "\n replace status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job_status(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.replace_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) else: data = self.replace_namespaced_job_status_with_http_info(name, namespace, body, **kwargs) return data
def replace_namespaced_job_status_with_http_info(self, name, namespace, body, **kwargs): "\n replace status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job_status_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method replace_namespaced_job_status" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `replace_namespaced_job_status`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `replace_namespaced_job_status`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `replace_namespaced_job_status`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}/status', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
-8,610,756,846,133,023,000
replace status of the specified Job
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_job_status_with_http_info(name, namespace, body, async_req=True)
>>> result = thread.get()

:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1Job body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1Job
If the method is called asynchronously, returns the request thread.
kubernetes/client/apis/batch_v1_api.py
replace_namespaced_job_status_with_http_info
MiaoRachelYu/python
python
def replace_namespaced_job_status_with_http_info(self, name, namespace, body, **kwargs): "\n replace status of the specified Job\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.replace_namespaced_job_status_with_http_info(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param V1Job body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1Job\n If the method is called asynchronously,\n returns the request thread.\n " all_params = ['name', 'namespace', 'body', 'pretty', 'dry_run'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for (key, val) in iteritems(params['kwargs']): if (key not in all_params): raise TypeError(("Got an unexpected keyword argument '%s' to method replace_namespaced_job_status" % key)) params[key] = val del params['kwargs'] if (('name' not in params) or (params['name'] is None)): raise ValueError('Missing the required parameter `name` when calling `replace_namespaced_job_status`') if (('namespace' not in params) or (params['namespace'] is None)): raise ValueError('Missing the required parameter `namespace` when calling `replace_namespaced_job_status`') if (('body' not in params) or (params['body'] is None)): raise ValueError('Missing the required parameter `body` when calling `replace_namespaced_job_status`') collection_formats = {} path_params = {} if ('name' in params): path_params['name'] = params['name'] if ('namespace' in params): path_params['namespace'] = params['namespace'] query_params = [] if ('pretty' in params): query_params.append(('pretty', params['pretty'])) if ('dry_run' in params): query_params.append(('dryRun', params['dry_run'])) header_params = {} form_params = [] local_var_files = {} body_params = None if ('body' in params): body_params = params['body'] header_params['Accept'] = self.api_client.select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) header_params['Content-Type'] = self.api_client.select_header_content_type(['*/*']) auth_settings = ['BearerToken'] return self.api_client.call_api('/apis/batch/v1/namespaces/{namespace}/jobs/{name}/status', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1Job', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
def _setup_training(self) -> None: '\n Setup the different steps to train a model:\n - check if all the data are given\n - create the proper strategy\n - create the features\n - prepare the model settings\n ' self._prepare_dataset() with self.args.strategy.scope(): self._create_optimizer() _ = self.optimizer.iterations self._set_loss_and_metric() self._create_checkpoint_manager() self._create_summary_writer()
-995,004,629,150,649,300
Set up the different steps to train a model:
- check if all the data are given
- create the proper strategy
- create the features
- prepare the model settings
src/transformers/trainer_tf.py
_setup_training
52Pig/transformers
python
def _setup_training(self) -> None: '\n Setup the different steps to train a model:\n - check if all the data are given\n - create the proper strategy\n - create the features\n - prepare the model settings\n ' self._prepare_dataset() with self.args.strategy.scope(): self._create_optimizer() _ = self.optimizer.iterations self._set_loss_and_metric() self._create_checkpoint_manager() self._create_summary_writer()
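The point of doing all of this inside `self.args.strategy.scope()` is that variables (model weights, optimizer slots, the step counter) must be created under the distribution strategy that will own them. A minimal, generic sketch of that pattern with a plain `MirroredStrategy` and a toy model — not the trainer's actual wiring:

```python
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()

# Variables created inside scope() are placed/mirrored by the strategy,
# which is why the optimizer (and its iterations counter) is built here too.
with strategy.scope():
    model = tf.keras.Sequential([tf.keras.layers.Dense(2)])
    optimizer = tf.keras.optimizers.Adam(learning_rate=3e-5)
    _ = optimizer.iterations  # forces creation of the global step variable
```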
def _set_loss_and_metric(self) -> None: '\n Create the training loss and metric with their name. Allowed names are those listed\n in the Tensorflow documentation and those contained in the transformers library.\n ' try: self.loss = tf.keras.losses.get({'class_name': self.args.loss_name, 'config': {'from_logits': True, 'reduction': tf.keras.losses.Reduction.NONE}}) except TypeError: self.loss = tf.keras.losses.get({'class_name': self.args.loss_name, 'config': {'reduction': tf.keras.losses.Reduction.NONE}})
8,436,403,641,088,140,000
Create the training loss and metric with their name. Allowed names are those listed in the Tensorflow documentation and those contained in the transformers library.
src/transformers/trainer_tf.py
_set_loss_and_metric
52Pig/transformers
python
def _set_loss_and_metric(self) -> None: '\n Create the training loss and metric with their name. Allowed names are those listed\n in the Tensorflow documentation and those contained in the transformers library.\n ' try: self.loss = tf.keras.losses.get({'class_name': self.args.loss_name, 'config': {'from_logits': True, 'reduction': tf.keras.losses.Reduction.NONE}}) except TypeError: self.loss = tf.keras.losses.get({'class_name': self.args.loss_name, 'config': {'reduction': tf.keras.losses.Reduction.NONE}})
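The `try/except TypeError` exists because `tf.keras.losses.get` forwards the `config` dict straight to the loss constructor, and not every loss accepts `from_logits`. A quick illustration (loss names chosen for the example, not taken from the trainer):

```python
import tensorflow as tf

# SparseCategoricalCrossentropy accepts from_logits, so the full config works.
loss_a = tf.keras.losses.get({
    "class_name": "SparseCategoricalCrossentropy",
    "config": {"from_logits": True, "reduction": tf.keras.losses.Reduction.NONE},
})

# MeanSquaredError has no from_logits argument; with it the call raises
# TypeError, which is exactly the case the fallback branch handles.
loss_b = tf.keras.losses.get({
    "class_name": "MeanSquaredError",
    "config": {"reduction": tf.keras.losses.Reduction.NONE},
})
```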
def _create_summary_writer(self) -> None: '\n Create a summary writer to be able to read the logs in Tensorboard.\n ' self.writer = tf.summary.create_file_writer(self.args.logging_dir)
6,580,074,118,470,670,000
Create a summary writer to be able to read the logs in Tensorboard.
src/transformers/trainer_tf.py
_create_summary_writer
52Pig/transformers
python
def _create_summary_writer(self) -> None: '\n \n ' self.writer = tf.summary.create_file_writer(self.args.logging_dir)
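A minimal sketch of the summary-writer pattern the trainer builds on; the log directory is arbitrary here:

```python
import tensorflow as tf

writer = tf.summary.create_file_writer("./logs")

# Scalars written inside the writer's context appear in TensorBoard
# under the given tag, indexed by step.
with writer.as_default():
    tf.summary.scalar("loss", 0.42, step=1)
writer.flush()
```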
def _prepare_dataset(self) -> None: '\n Prepare the training, validation and test data.\n ' if (self.train_dataset is not None): self.num_train_examples = self.train_dataset.reduce(tf.constant(0), (lambda x, _: (x + 1))).numpy() if (self.args.max_steps > 0): self.train_steps = self.args.max_steps else: self.train_steps: int = math.ceil((self.num_train_examples / self.args.train_batch_size)) self.train_dataset = self.train_dataset.cache().shuffle(self.num_train_examples).batch(self.args.train_batch_size).prefetch(tf.data.experimental.AUTOTUNE) if (self.args.max_steps > 0): self.train_dataset = self.train_dataset.repeat((- 1)) self.train_dataset = self.args.strategy.experimental_distribute_dataset(self.train_dataset) else: self.train_steps = 0 if (self.eval_dataset is not None): self.eval_dataset = self.eval_dataset.batch(self.args.eval_batch_size).cache().prefetch(tf.data.experimental.AUTOTUNE) self.eval_dataset = self.args.strategy.experimental_distribute_dataset(self.eval_dataset)
-2,703,407,660,692,259,300
Prepare the training, validation and test data.
src/transformers/trainer_tf.py
_prepare_dataset
52Pig/transformers
python
def _prepare_dataset(self) -> None: '\n \n ' if (self.train_dataset is not None): self.num_train_examples = self.train_dataset.reduce(tf.constant(0), (lambda x, _: (x + 1))).numpy() if (self.args.max_steps > 0): self.train_steps = self.args.max_steps else: self.train_steps: int = math.ceil((self.num_train_examples / self.args.train_batch_size)) self.train_dataset = self.train_dataset.cache().shuffle(self.num_train_examples).batch(self.args.train_batch_size).prefetch(tf.data.experimental.AUTOTUNE) if (self.args.max_steps > 0): self.train_dataset = self.train_dataset.repeat((- 1)) self.train_dataset = self.args.strategy.experimental_distribute_dataset(self.train_dataset) else: self.train_steps = 0 if (self.eval_dataset is not None): self.eval_dataset = self.eval_dataset.batch(self.args.eval_batch_size).cache().prefetch(tf.data.experimental.AUTOTUNE) self.eval_dataset = self.args.strategy.experimental_distribute_dataset(self.eval_dataset)
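`_prepare_dataset` counts examples by folding a counter over the dataset (a generic `tf.data.Dataset` has no cheap length) and then chains cache/shuffle/batch/prefetch. A toy reproduction of that arithmetic and pipeline:

```python
import math
import tensorflow as tf

train_batch_size = 8
ds = tf.data.Dataset.from_tensor_slices(tf.range(100))

# Same counting trick as the trainer: reduce with a running counter.
num_examples = ds.reduce(tf.constant(0), lambda x, _: x + 1).numpy()
train_steps = math.ceil(num_examples / train_batch_size)

ds = (ds.cache()
        .shuffle(num_examples)
        .batch(train_batch_size)
        .prefetch(tf.data.experimental.AUTOTUNE))
print(num_examples, train_steps)  # 100, 13
```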
def _create_optimizer(self) -> None: '\n Create the training optimizer with its name. Allowed names are those listed\n in the Tensorflow documentation and those contained in the transformers library.\n ' if (self.args.optimizer_name == 'adamw'): self.optimizer = create_optimizer(self.args.learning_rate, self.train_steps, self.args.warmup_steps, self.args.end_lr) else: try: self.optimizer = tf.keras.optimizers.get({'class_name': self.args.optimizer_name, 'config': {'learning_rate': self.args.learning_rate, 'epsilon': self.args.adam_epsilon}}) except TypeError: self.optimizer = tf.keras.optimizers.get({'class_name': self.args.optimizer_name, 'config': {'learning_rate': self.args.learning_rate}}) logger.info('Created an/a {} optimizer'.format(self.args.optimizer_name))
1,140,083,078,374,236,300
Create the training optimizer with its name. Allowed names are those listed in the Tensorflow documentation and those contained in the transformers library.
src/transformers/trainer_tf.py
_create_optimizer
52Pig/transformers
python
def _create_optimizer(self) -> None: '\n Create the training optimizer with its name. Allowed names are those listed\n in the Tensorflow documentation and those contained in the transformers library.\n ' if (self.args.optimizer_name == 'adamw'): self.optimizer = create_optimizer(self.args.learning_rate, self.train_steps, self.args.warmup_steps, self.args.end_lr) else: try: self.optimizer = tf.keras.optimizers.get({'class_name': self.args.optimizer_name, 'config': {'learning_rate': self.args.learning_rate, 'epsilon': self.args.adam_epsilon}}) except TypeError: self.optimizer = tf.keras.optimizers.get({'class_name': self.args.optimizer_name, 'config': {'learning_rate': self.args.learning_rate}}) logger.info('Created an/a {} optimizer'.format(self.args.optimizer_name))
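As with the loss, `tf.keras.optimizers.get` passes `config` to the optimizer constructor, so `epsilon` only works for optimizers that take it; that is what the `TypeError` fallback covers. A hedged illustration (optimizer names chosen for the example):

```python
import tensorflow as tf

# Adam accepts epsilon, so the full config deserializes cleanly.
optimizer = tf.keras.optimizers.get({
    "class_name": "Adam",
    "config": {"learning_rate": 3e-5, "epsilon": 1e-8},
})

# Optimizers without an epsilon parameter (e.g. SGD) reject the extra key,
# which is why the trainer falls back to a learning-rate-only config.
fallback = tf.keras.optimizers.get({
    "class_name": "SGD",
    "config": {"learning_rate": 3e-5},
})
```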
def _create_checkpoint_manager(self, max_to_keep: int=5, load_model: bool=True) -> None: '\n Create a checkpoint manager in order to be able to make the training\n fault-tolerant.\n Args:\n max_to_keep: the maximum number of checkpoints to keep in the checkpoint path.\n load_model: if we want to start the training from the latest checkpoint.\n ' ckpt = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) self.model.ckpt_manager = tf.train.CheckpointManager(ckpt, PREFIX_CHECKPOINT_DIR, max_to_keep=max_to_keep) if load_model: ckpt.restore(self.model.ckpt_manager.latest_checkpoint).expect_partial()
8,562,685,695,212,505,000
Create a checkpoint manager in order to be able to make the training fault-tolerant.
Args:
    max_to_keep: the maximum number of checkpoints to keep in the checkpoint path.
    load_model: if we want to start the training from the latest checkpoint.
src/transformers/trainer_tf.py
_create_checkpoint_manager
52Pig/transformers
python
def _create_checkpoint_manager(self, max_to_keep: int=5, load_model: bool=True) -> None: '\n Create a checkpoint manager in order to be able to make the training\n fault-tolerant.\n Args:\n max_to_keep: the maximum number of checkpoints to keep in the checkpoint path.\n load_model: if we want to start the training from the latest checkpoint.\n ' ckpt = tf.train.Checkpoint(optimizer=self.optimizer, model=self.model) self.model.ckpt_manager = tf.train.CheckpointManager(ckpt, PREFIX_CHECKPOINT_DIR, max_to_keep=max_to_keep) if load_model: ckpt.restore(self.model.ckpt_manager.latest_checkpoint).expect_partial()
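A self-contained sketch of the fault-tolerance pattern: bundle model and optimizer into a `tf.train.Checkpoint`, rotate files with `CheckpointManager`, and restore the latest checkpoint when one exists. The directory name is arbitrary:

```python
import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(2)])
optimizer = tf.keras.optimizers.Adam()

ckpt = tf.train.Checkpoint(optimizer=optimizer, model=model)
manager = tf.train.CheckpointManager(ckpt, "./checkpoint", max_to_keep=5)

# expect_partial() silences warnings about checkpoint values that are not
# consumed yet (e.g. optimizer slots that get created lazily).
if manager.latest_checkpoint:
    ckpt.restore(manager.latest_checkpoint).expect_partial()

save_path = manager.save()  # called periodically during training
print("saved to", save_path)
```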
@tf.function def _evaluate_steps(self, per_replica_features, per_replica_labels): '\n One step evaluation across replica.\n Args:\n per_replica_features: the batched features.\n per_replica_labels: the batched labels.\n Returns:\n The loss corresponding to the given batch.\n ' (per_replica_loss, per_replica_logits) = self.args.strategy.experimental_run_v2(self._run_model, args=(per_replica_features, per_replica_labels, False)) try: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=0) except ValueError: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, None) return (reduced_loss, per_replica_logits)
-4,422,511,022,122,007,000
One step of evaluation across replicas.
Args:
    per_replica_features: the batched features.
    per_replica_labels: the batched labels.
Returns:
    The loss corresponding to the given batch.
src/transformers/trainer_tf.py
_evaluate_steps
52Pig/transformers
python
@tf.function def _evaluate_steps(self, per_replica_features, per_replica_labels): '\n One step evaluation across replica.\n Args:\n per_replica_features: the batched features.\n per_replica_labels: the batched labels.\n Returns:\n The loss corresponding to the given batch.\n ' (per_replica_loss, per_replica_logits) = self.args.strategy.experimental_run_v2(self._run_model, args=(per_replica_features, per_replica_labels, False)) try: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=0) except ValueError: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, None) return (reduced_loss, per_replica_logits)
def evaluate(self, eval_dataset: Optional[tf.data.Dataset]=None, prediction_loss_only: Optional[bool]=None) -> Dict[(str, float)]: '\n Prediction/evaluation loop, shared by `evaluate()` and `predict()`.\n ' if (eval_dataset is None): eval_dataset = self.eval_dataset output = self._prediction_loop(eval_dataset, description='Evaluation') return output.metrics
5,990,231,844,588,909,000
Prediction/evaluation loop, shared by `evaluate()` and `predict()`.
src/transformers/trainer_tf.py
evaluate
52Pig/transformers
python
def evaluate(self, eval_dataset: Optional[tf.data.Dataset]=None, prediction_loss_only: Optional[bool]=None) -> Dict[(str, float)]: '\n \n ' if (eval_dataset is None): eval_dataset = self.eval_dataset output = self._prediction_loop(eval_dataset, description='Evaluation') return output.metrics
def train(self) -> None: '\n Train method to train the model.\n ' if self.args.debug: tf.summary.trace_on(graph=True, profiler=True) self.gradient_accumulator.reset() iterations = self.optimizer.iterations if (iterations.numpy() > 0): logger.info('Start the training from the last checkpoint') start_epoch = ((iterations.numpy() // self.train_steps) + 1) else: start_epoch = 1 tf.summary.experimental.set_step(iterations) epochs = (1 if (self.args.max_steps > 0) else self.args.num_train_epochs) logger.info('***** Running training *****') logger.info(' Num examples = %d', self.num_train_examples) logger.info(' Num Epochs = %d', epochs) logger.info(' Total optimization steps = %d', self.train_steps) for epoch in range(start_epoch, int((epochs + 1))): for training_loss in self._training_steps(): step = iterations.numpy() if self.args.debug: with self.writer.as_default(): tf.summary.scalar('loss', training_loss, step=step) if ((step == 1) and self.args.debug): with self.writer.as_default(): tf.summary.trace_export(name='training', step=step, profiler_outdir=self.args.logging_dir) if (self.args.evaluate_during_training and ((step % self.args.eval_steps) == 0)): logs = {} results = self.evaluate() for (key, value) in results.items(): eval_key = 'eval_{}'.format(key) logs[eval_key] = value if callable(self.optimizer.learning_rate): logs['learning_rate'] = self.optimizer.learning_rate(step).numpy() else: logs['learning_rate'] = self.optimizer.learning_rate.numpy() logger.info('Epoch {} Step {} Validation Metrics {}'.format(epoch, step, logs)) with self.writer.as_default(): for (k, v) in logs.items(): tf.summary.scalar(k, v, step=step) if ((step % self.args.logging_steps) == 0): logger.info('Epoch {} Step {} Train Loss {:.4f}'.format(epoch, step, training_loss.numpy())) if ((step % self.args.save_steps) == 0): ckpt_save_path = self.model.ckpt_manager.save() logger.info('Saving checkpoint for step {} at {}'.format(step, ckpt_save_path)) if ((step % self.train_steps) == 0): break
7,017,242,654,244,077,000
Train method to train the model.
src/transformers/trainer_tf.py
train
52Pig/transformers
python
def train(self) -> None: '\n \n ' if self.args.debug: tf.summary.trace_on(graph=True, profiler=True) self.gradient_accumulator.reset() iterations = self.optimizer.iterations if (iterations.numpy() > 0): logger.info('Start the training from the last checkpoint') start_epoch = ((iterations.numpy() // self.train_steps) + 1) else: start_epoch = 1 tf.summary.experimental.set_step(iterations) epochs = (1 if (self.args.max_steps > 0) else self.args.num_train_epochs) logger.info('***** Running training *****') logger.info(' Num examples = %d', self.num_train_examples) logger.info(' Num Epochs = %d', epochs) logger.info(' Total optimization steps = %d', self.train_steps) for epoch in range(start_epoch, int((epochs + 1))): for training_loss in self._training_steps(): step = iterations.numpy() if self.args.debug: with self.writer.as_default(): tf.summary.scalar('loss', training_loss, step=step) if ((step == 1) and self.args.debug): with self.writer.as_default(): tf.summary.trace_export(name='training', step=step, profiler_outdir=self.args.logging_dir) if (self.args.evaluate_during_training and ((step % self.args.eval_steps) == 0)): logs = {} results = self.evaluate() for (key, value) in results.items(): eval_key = 'eval_{}'.format(key) logs[eval_key] = value if callable(self.optimizer.learning_rate): logs['learning_rate'] = self.optimizer.learning_rate(step).numpy() else: logs['learning_rate'] = self.optimizer.learning_rate.numpy() logger.info('Epoch {} Step {} Validation Metrics {}'.format(epoch, step, logs)) with self.writer.as_default(): for (k, v) in logs.items(): tf.summary.scalar(k, v, step=step) if ((step % self.args.logging_steps) == 0): logger.info('Epoch {} Step {} Train Loss {:.4f}'.format(epoch, step, training_loss.numpy())) if ((step % self.args.save_steps) == 0): ckpt_save_path = self.model.ckpt_manager.save() logger.info('Saving checkpoint for step {} at {}'.format(step, ckpt_save_path)) if ((step % self.train_steps) == 0): break
def _training_steps(self): '\n Returns a generator over training steps (i.e. parameters update).\n ' for (i, loss) in enumerate(self._accumulate_next_gradients()): if ((i % self.args.gradient_accumulation_steps) == 0): self._apply_gradients() (yield loss)
944,048,814,093,468,400
Returns a generator over training steps (i.e. parameters update).
src/transformers/trainer_tf.py
_training_steps
52Pig/transformers
python
def _training_steps(self): '\n \n ' for (i, loss) in enumerate(self._accumulate_next_gradients()): if ((i % self.args.gradient_accumulation_steps) == 0): self._apply_gradients() (yield loss)
@tf.function def _apply_gradients(self): 'Applies the gradients (cross-replica).' self.args.strategy.experimental_run_v2(self._step)
4,978,387,428,092,544,000
Applies the gradients (cross-replica).
src/transformers/trainer_tf.py
_apply_gradients
52Pig/transformers
python
@tf.function def _apply_gradients(self): self.args.strategy.experimental_run_v2(self._step)
def _step(self): 'Applies gradients and resets accumulation.' gradient_scale = (self.gradient_accumulator.step * self.args.strategy.num_replicas_in_sync) gradients = [(gradient / tf.cast(gradient_scale, gradient.dtype)) for gradient in self.gradient_accumulator.gradients] gradients = [tf.clip_by_value(grad, (- self.args.max_grad_norm), self.args.max_grad_norm) for grad in gradients] self.optimizer.apply_gradients(list(zip(gradients, self.model.trainable_variables))) self.gradient_accumulator.reset()
2,706,932,928,488,056,000
Applies gradients and resets accumulation.
src/transformers/trainer_tf.py
_step
52Pig/transformers
python
def _step(self): gradient_scale = (self.gradient_accumulator.step * self.args.strategy.num_replicas_in_sync) gradients = [(gradient / tf.cast(gradient_scale, gradient.dtype)) for gradient in self.gradient_accumulator.gradients] gradients = [tf.clip_by_value(grad, (- self.args.max_grad_norm), self.args.max_grad_norm) for grad in gradients] self.optimizer.apply_gradients(list(zip(gradients, self.model.trainable_variables))) self.gradient_accumulator.reset()
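`_step` averages the accumulated gradients over accumulation steps × replicas, clips them element-wise, and applies them; note the clipping is by value (matching the code above) even though the argument is named `max_grad_norm`. A single-replica toy version of that arithmetic, with made-up numbers:

```python
import tensorflow as tf

max_grad_norm = 1.0
accumulation_steps = 4

var = tf.Variable([2.0, -3.0])
optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)

# Pretend these gradients were summed over 4 accumulation steps.
accumulated = tf.constant([8.0, -12.0])

grad = accumulated / tf.cast(accumulation_steps, accumulated.dtype)
grad = tf.clip_by_value(grad, -max_grad_norm, max_grad_norm)

optimizer.apply_gradients([(grad, var)])
print(var.numpy())  # each component moved by at most lr * max_grad_norm
```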
def _accumulate_next_gradients(self): 'Accumulates the gradients from the next element in dataset.' iterator = iter(self.train_dataset) @tf.function def _accumulate_next(): (per_replica_features, per_replica_labels) = next(iterator) return self._accumulate_gradients(per_replica_features, per_replica_labels) while True: try: (yield _accumulate_next()) except tf.errors.OutOfRangeError: break
1,351,652,028,254,688,800
Accumulates the gradients from the next element in dataset.
src/transformers/trainer_tf.py
_accumulate_next_gradients
52Pig/transformers
python
def _accumulate_next_gradients(self): iterator = iter(self.train_dataset) @tf.function def _accumulate_next(): (per_replica_features, per_replica_labels) = next(iterator) return self._accumulate_gradients(per_replica_features, per_replica_labels) while True: try: (yield _accumulate_next()) except tf.errors.OutOfRangeError: break
def _accumulate_gradients(self, per_replica_features, per_replica_labels): 'Accumulates the gradients across all the replica.' per_replica_loss = self.args.strategy.experimental_run_v2(self._forward, args=(per_replica_features, per_replica_labels)) try: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=0) except ValueError: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, None) return reduced_loss
7,845,870,106,116,285,000
Accumulates the gradients across all the replica.
src/transformers/trainer_tf.py
_accumulate_gradients
52Pig/transformers
python
def _accumulate_gradients(self, per_replica_features, per_replica_labels): per_replica_loss = self.args.strategy.experimental_run_v2(self._forward, args=(per_replica_features, per_replica_labels)) try: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=0) except ValueError: reduced_loss = self.args.strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, None) return reduced_loss
def _forward(self, features, labels): 'Forwards a training example and accumulates the gradients.' (per_example_loss, _) = self._run_model(features, labels, True) gradients = tf.gradients(per_example_loss, self.model.trainable_variables) gradients = [(g if (g is not None) else tf.zeros_like(v)) for (g, v) in zip(gradients, self.model.trainable_variables)] self.gradient_accumulator(gradients) return per_example_loss
-3,476,753,415,646,999,600
Forwards a training example and accumulates the gradients.
src/transformers/trainer_tf.py
_forward
52Pig/transformers
python
def _forward(self, features, labels): (per_example_loss, _) = self._run_model(features, labels, True) gradients = tf.gradients(per_example_loss, self.model.trainable_variables) gradients = [(g if (g is not None) else tf.zeros_like(v)) for (g, v) in zip(gradients, self.model.trainable_variables)] self.gradient_accumulator(gradients) return per_example_loss
def _run_model(self, features, labels, training): '\n Computes the loss of the given features and labels pair.\n Args:\n features: the batched features.\n labels: the batched labels.\n training: run the model in training mode or not\n ' if ((self.args.mode == 'text-classification') or (self.args.mode == 'token-classification')): logits = self.model(features, training=training)[0] else: logits = self.model(features, training=training) if (self.args.mode == 'token-classification'): active_loss = (tf.reshape(labels, ((- 1),)) != (- 1)) reduced_logits = tf.boolean_mask(tf.reshape(logits, ((- 1), shape_list(logits)[2])), active_loss) labels = tf.boolean_mask(tf.reshape(labels, ((- 1),)), active_loss) loss = self.loss(labels, reduced_logits) elif (self.args.mode == 'question-answering'): start_loss = self.loss(labels['start_position'], logits[0]) end_loss = self.loss(labels['end_position'], logits[1]) loss = ((start_loss + end_loss) / 2.0) else: loss = self.loss(labels, logits) loss += (sum(self.model.losses) * (1.0 / self.args.n_gpu)) return (loss, logits)
5,053,817,438,389,986,000
Computes the loss of the given features and labels pair.
Args:
    features: the batched features.
    labels: the batched labels.
    training: run the model in training mode or not
src/transformers/trainer_tf.py
_run_model
52Pig/transformers
python
def _run_model(self, features, labels, training): '\n Computes the loss of the given features and labels pair.\n Args:\n features: the batched features.\n labels: the batched labels.\n training: run the model in training mode or not\n ' if ((self.args.mode == 'text-classification') or (self.args.mode == 'token-classification')): logits = self.model(features, training=training)[0] else: logits = self.model(features, training=training) if (self.args.mode == 'token-classification'): active_loss = (tf.reshape(labels, ((- 1),)) != (- 1)) reduced_logits = tf.boolean_mask(tf.reshape(logits, ((- 1), shape_list(logits)[2])), active_loss) labels = tf.boolean_mask(tf.reshape(labels, ((- 1),)), active_loss) loss = self.loss(labels, reduced_logits) elif (self.args.mode == 'question-answering'): start_loss = self.loss(labels['start_position'], logits[0]) end_loss = self.loss(labels['end_position'], logits[1]) loss = ((start_loss + end_loss) / 2.0) else: loss = self.loss(labels, logits) loss += (sum(self.model.losses) * (1.0 / self.args.n_gpu)) return (loss, logits)
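For token classification the loss skips padded/ignored positions (label `-1`): labels and logits are flattened and filtered with `tf.boolean_mask` before the loss is computed. A toy illustration with two sequences of three tokens and four classes (all values invented):

```python
import tensorflow as tf

logits = tf.random.normal((2, 3, 4))            # (batch, seq_len, num_labels)
labels = tf.constant([[1, -1, 2], [0, 3, -1]])  # -1 marks ignored positions

active = tf.reshape(labels, (-1,)) != -1
flat_logits = tf.reshape(logits, (-1, logits.shape[-1]))

kept_logits = tf.boolean_mask(flat_logits, active)                # (4, 4)
kept_labels = tf.boolean_mask(tf.reshape(labels, (-1,)), active)  # (4,)

loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(
    from_logits=True, reduction=tf.keras.losses.Reduction.NONE)
print(loss_fn(kept_labels, kept_logits).shape)  # one loss per kept token
```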
def predict(self, test_dataset: tf.data.Dataset) -> PredictionOutput: '\n Run prediction and return predictions and potential metrics.\n Depending on the dataset and your use case, your test dataset may contain labels.\n In that case, this method will also return metrics, like in evaluate().\n Args:\n test_dataset: something similar to a PT Dataset. This is just\n temporary before to have a framework-agnostic approach for datasets.\n ' test_dataset = test_dataset.batch(self.args.eval_batch_size) test_dataset = self.args.strategy.experimental_distribute_dataset(test_dataset) return self._prediction_loop(test_dataset, description='Prediction')
8,778,713,418,168,482,000
Run prediction and return predictions and potential metrics.
Depending on the dataset and your use case, your test dataset may contain labels. In that case, this method will also return metrics, like in evaluate().
Args:
    test_dataset: something similar to a PT Dataset. This is a temporary stand-in until a framework-agnostic approach for datasets is in place.
src/transformers/trainer_tf.py
predict
52Pig/transformers
python
def predict(self, test_dataset: tf.data.Dataset) -> PredictionOutput: '\n Run prediction and return predictions and potential metrics.\n Depending on the dataset and your use case, your test dataset may contain labels.\n In that case, this method will also return metrics, like in evaluate().\n Args:\n test_dataset: something similar to a PT Dataset. This is just\n temporary before to have a framework-agnostic approach for datasets.\n ' test_dataset = test_dataset.batch(self.args.eval_batch_size) test_dataset = self.args.strategy.experimental_distribute_dataset(test_dataset) return self._prediction_loop(test_dataset, description='Prediction')
def save_model(self) -> None: '\n Save the pretrained model and create a Tensorflow saved model.\n ' logger.info('Saving model in {}'.format(self.args.output_dir)) path = os.path.join(self.args.output_dir, 'saved_model') logger.info('Saving model in {}'.format(path)) os.makedirs(path, exist_ok=True) self.model.save_pretrained(self.args.output_dir)
-6,240,600,897,507,889,000
Save the pretrained model and create a Tensorflow saved model.
src/transformers/trainer_tf.py
save_model
52Pig/transformers
python
def save_model(self) -> None: '\n \n ' logger.info('Saving model in {}'.format(self.args.output_dir)) path = os.path.join(self.args.output_dir, 'saved_model') logger.info('Saving model in {}'.format(path)) os.makedirs(path, exist_ok=True) self.model.save_pretrained(self.args.output_dir)
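A sketch of the save/reload round trip that save_model enables, assuming a TF model from transformers and a hypothetical output directory of my choosing:

import os
from transformers import TFAutoModel  # any TF*PreTrainedModel behaves the same way

output_dir = './my_output_dir'  # hypothetical path
os.makedirs(os.path.join(output_dir, 'saved_model'), exist_ok=True)

model = TFAutoModel.from_pretrained('bert-base-uncased')
model.save_pretrained(output_dir)                  # writes config.json and the TF weights
reloaded = TFAutoModel.from_pretrained(output_dir)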
@property def marker(self): "\n The 'marker' property is an instance of Marker\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.unselected.Marker\n - A dict of string/value properties that will be passed\n to the Marker constructor\n \n Supported dict properties:\n \n color\n Sets the marker color of unselected points,\n applied only when a selection exists.\n opacity\n Sets the marker opacity of unselected points,\n applied only when a selection exists.\n size\n Sets the marker size of unselected points,\n applied only when a selection exists.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.unselected.Marker\n " return self['marker']
-4,245,806,239,186,361,300
The 'marker' property is an instance of Marker that may be specified as: - An instance of plotly.graph_objs.scattergeo.unselected.Marker - A dict of string/value properties that will be passed to the Marker constructor Supported dict properties: color Sets the marker color of unselected points, applied only when a selection exists. opacity Sets the marker opacity of unselected points, applied only when a selection exists. size Sets the marker size of unselected points, applied only when a selection exists. Returns ------- plotly.graph_objs.scattergeo.unselected.Marker
plotly/graph_objs/scattergeo/__init__.py
marker
Jonathan-MW/plotly.py
python
@property def marker(self): "\n The 'marker' property is an instance of Marker\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.unselected.Marker\n - A dict of string/value properties that will be passed\n to the Marker constructor\n \n Supported dict properties:\n \n color\n Sets the marker color of unselected points,\n applied only when a selection exists.\n opacity\n Sets the marker opacity of unselected points,\n applied only when a selection exists.\n size\n Sets the marker size of unselected points,\n applied only when a selection exists.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.unselected.Marker\n " return self['marker']
@property def textfont(self): "\n The 'textfont' property is an instance of Textfont\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.unselected.Textfont\n - A dict of string/value properties that will be passed\n to the Textfont constructor\n \n Supported dict properties:\n \n color\n Sets the text font color of unselected points,\n applied only when a selection exists.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.unselected.Textfont\n " return self['textfont']
8,917,031,519,796,172,000
The 'textfont' property is an instance of Textfont that may be specified as: - An instance of plotly.graph_objs.scattergeo.unselected.Textfont - A dict of string/value properties that will be passed to the Textfont constructor Supported dict properties: color Sets the text font color of unselected points, applied only when a selection exists. Returns ------- plotly.graph_objs.scattergeo.unselected.Textfont
plotly/graph_objs/scattergeo/__init__.py
textfont
Jonathan-MW/plotly.py
python
@property def textfont(self): "\n The 'textfont' property is an instance of Textfont\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.unselected.Textfont\n - A dict of string/value properties that will be passed\n to the Textfont constructor\n \n Supported dict properties:\n \n color\n Sets the text font color of unselected points,\n applied only when a selection exists.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.unselected.Textfont\n " return self['textfont']
def __init__(self, arg=None, marker=None, textfont=None, **kwargs): '\n Construct a new Unselected object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Unselected\n marker\n plotly.graph_objs.scattergeo.unselected.Marker instance\n or dict with compatible properties\n textfont\n plotly.graph_objs.scattergeo.unselected.Textfont\n instance or dict with compatible properties\n\n Returns\n -------\n Unselected\n ' super(Unselected, self).__init__('unselected') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Unselected \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Unselected') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import unselected as v_unselected self._validators['marker'] = v_unselected.MarkerValidator() self._validators['textfont'] = v_unselected.TextfontValidator() _v = arg.pop('marker', None) self['marker'] = (marker if (marker is not None) else _v) _v = arg.pop('textfont', None) self['textfont'] = (textfont if (textfont is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
-6,556,748,513,909,606,000
Construct a new Unselected object Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Unselected marker plotly.graph_objs.scattergeo.unselected.Marker instance or dict with compatible properties textfont plotly.graph_objs.scattergeo.unselected.Textfont instance or dict with compatible properties Returns ------- Unselected
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, marker=None, textfont=None, **kwargs): '\n Construct a new Unselected object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Unselected\n marker\n plotly.graph_objs.scattergeo.unselected.Marker instance\n or dict with compatible properties\n textfont\n plotly.graph_objs.scattergeo.unselected.Textfont\n instance or dict with compatible properties\n\n Returns\n -------\n Unselected\n ' super(Unselected, self).__init__('unselected') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Unselected \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Unselected') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import unselected as v_unselected self._validators['marker'] = v_unselected.MarkerValidator() self._validators['textfont'] = v_unselected.TextfontValidator() _v = arg.pop('marker', None) self['marker'] = (marker if (marker is not None) else _v) _v = arg.pop('textfont', None) self['textfont'] = (textfont if (textfont is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
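A small usage sketch for the Unselected container, assuming the plotly.graph_objs API documented in this file; the coordinates are made up:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[-73.6, 2.35, 139.7],  # made-up coordinates
    lat=[45.5, 48.86, 35.68],
    mode='markers',
    unselected=go.scattergeo.Unselected(
        marker=dict(opacity=0.3, size=6),  # fade points outside the current selection
        textfont=dict(color='lightgray'),
    ),
)
fig = go.Figure(data=[trace])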
@property def color(self): "\n The 'color' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['color']
-3,098,539,584,895,587,000
The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A list or array of any of the above Returns ------- str|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
color
Jonathan-MW/plotly.py
python
@property def color(self): "\n The 'color' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['color']
@property def colorsrc(self): "\n Sets the source reference on plot.ly for color .\n \n The 'colorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['colorsrc']
-4,192,673,473,912,421,000
Sets the source reference on plot.ly for color . The 'colorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
colorsrc
Jonathan-MW/plotly.py
python
@property def colorsrc(self): "\n Sets the source reference on plot.ly for color .\n \n The 'colorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['colorsrc']
@property def family(self): '\n HTML font family - the typeface that will be applied by the web\n browser. The web browser will only be able to apply a font if\n it is available on the system which it operates. Provide\n multiple font families, separated by commas, to indicate the\n preference in which to apply fonts if they aren\'t available on\n the system. The plotly service (at https://plot.ly or on-\n premise) generates images on a server, where only a select\n number of fonts are installed and supported. These include\n "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",\n "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open\n Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New\n Roman".\n \n The \'family\' property is a string and must be specified as:\n - A non-empty string\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n str|numpy.ndarray\n ' return self['family']
-3,524,569,398,637,699,600
HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The plotly service (at https://plot.ly or on- premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". The 'family' property is a string and must be specified as: - A non-empty string - A tuple, list, or one-dimensional numpy array of the above Returns ------- str|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
family
Jonathan-MW/plotly.py
python
@property def family(self): '\n HTML font family - the typeface that will be applied by the web\n browser. The web browser will only be able to apply a font if\n it is available on the system which it operates. Provide\n multiple font families, separated by commas, to indicate the\n preference in which to apply fonts if they aren\'t available on\n the system. The plotly service (at https://plot.ly or on-\n premise) generates images on a server, where only a select\n number of fonts are installed and supported. These include\n "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",\n "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open\n Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New\n Roman".\n \n The \'family\' property is a string and must be specified as:\n - A non-empty string\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n str|numpy.ndarray\n ' return self['family']
@property def familysrc(self): "\n Sets the source reference on plot.ly for family .\n \n The 'familysrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['familysrc']
4,272,517,766,265,763,300
Sets the source reference on plot.ly for family . The 'familysrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
familysrc
Jonathan-MW/plotly.py
python
@property def familysrc(self): "\n Sets the source reference on plot.ly for family .\n \n The 'familysrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['familysrc']
@property def size(self): "\n The 'size' property is a number and may be specified as:\n - An int or float in the interval [1, inf]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|float|numpy.ndarray\n " return self['size']
6,887,128,696,685,480,000
The 'size' property is a number and may be specified as: - An int or float in the interval [1, inf] - A tuple, list, or one-dimensional numpy array of the above Returns ------- int|float|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
size
Jonathan-MW/plotly.py
python
@property def size(self): "\n The 'size' property is a number and may be specified as:\n - An int or float in the interval [1, inf]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|float|numpy.ndarray\n " return self['size']
@property def sizesrc(self): "\n Sets the source reference on plot.ly for size .\n \n The 'sizesrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['sizesrc']
8,585,030,508,644,112,000
Sets the source reference on plot.ly for size . The 'sizesrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
sizesrc
Jonathan-MW/plotly.py
python
@property def sizesrc(self): "\n Sets the source reference on plot.ly for size .\n \n The 'sizesrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['sizesrc']
def __init__(self, arg=None, color=None, colorsrc=None, family=None, familysrc=None, size=None, sizesrc=None, **kwargs): '\n Construct a new Textfont object\n \n Sets the text font.\n\n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Textfont\n color\n\n colorsrc\n Sets the source reference on plot.ly for color .\n family\n HTML font family - the typeface that will be applied by\n the web browser. The web browser will only be able to\n apply a font if it is available on the system which it\n operates. Provide multiple font families, separated by\n commas, to indicate the preference in which to apply\n fonts if they aren\'t available on the system. The\n plotly service (at https://plot.ly or on-premise)\n generates images on a server, where only a select\n number of fonts are installed and supported. These\n include "Arial", "Balto", "Courier New", "Droid Sans",,\n "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old\n Standard TT", "Open Sans", "Overpass", "PT Sans\n Narrow", "Raleway", "Times New Roman".\n familysrc\n Sets the source reference on plot.ly for family .\n size\n\n sizesrc\n Sets the source reference on plot.ly for size .\n\n Returns\n -------\n Textfont\n ' super(Textfont, self).__init__('textfont') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Textfont \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Textfont') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import textfont as v_textfont self._validators['color'] = v_textfont.ColorValidator() self._validators['colorsrc'] = v_textfont.ColorsrcValidator() self._validators['family'] = v_textfont.FamilyValidator() self._validators['familysrc'] = v_textfont.FamilysrcValidator() self._validators['size'] = v_textfont.SizeValidator() self._validators['sizesrc'] = v_textfont.SizesrcValidator() _v = arg.pop('color', None) self['color'] = (color if (color is not None) else _v) _v = arg.pop('colorsrc', None) self['colorsrc'] = (colorsrc if (colorsrc is not None) else _v) _v = arg.pop('family', None) self['family'] = (family if (family is not None) else _v) _v = arg.pop('familysrc', None) self['familysrc'] = (familysrc if (familysrc is not None) else _v) _v = arg.pop('size', None) self['size'] = (size if (size is not None) else _v) _v = arg.pop('sizesrc', None) self['sizesrc'] = (sizesrc if (sizesrc is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
5,437,240,896,516,272,000
Construct a new Textfont object Sets the text font. Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Textfont color colorsrc Sets the source reference on plot.ly for color . family HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The plotly service (at https://plot.ly or on-premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". familysrc Sets the source reference on plot.ly for family . size sizesrc Sets the source reference on plot.ly for size . Returns ------- Textfont
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, color=None, colorsrc=None, family=None, familysrc=None, size=None, sizesrc=None, **kwargs): '\n Construct a new Textfont object\n \n Sets the text font.\n\n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Textfont\n color\n\n colorsrc\n Sets the source reference on plot.ly for color .\n family\n HTML font family - the typeface that will be applied by\n the web browser. The web browser will only be able to\n apply a font if it is available on the system which it\n operates. Provide multiple font families, separated by\n commas, to indicate the preference in which to apply\n fonts if they aren\'t available on the system. The\n plotly service (at https://plot.ly or on-premise)\n generates images on a server, where only a select\n number of fonts are installed and supported. These\n include "Arial", "Balto", "Courier New", "Droid Sans",,\n "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old\n Standard TT", "Open Sans", "Overpass", "PT Sans\n Narrow", "Raleway", "Times New Roman".\n familysrc\n Sets the source reference on plot.ly for family .\n size\n\n sizesrc\n Sets the source reference on plot.ly for size .\n\n Returns\n -------\n Textfont\n ' super(Textfont, self).__init__('textfont') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Textfont \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Textfont') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import textfont as v_textfont self._validators['color'] = v_textfont.ColorValidator() self._validators['colorsrc'] = v_textfont.ColorsrcValidator() self._validators['family'] = v_textfont.FamilyValidator() self._validators['familysrc'] = v_textfont.FamilysrcValidator() self._validators['size'] = v_textfont.SizeValidator() self._validators['sizesrc'] = v_textfont.SizesrcValidator() _v = arg.pop('color', None) self['color'] = (color if (color is not None) else _v) _v = arg.pop('colorsrc', None) self['colorsrc'] = (colorsrc if (colorsrc is not None) else _v) _v = arg.pop('family', None) self['family'] = (family if (family is not None) else _v) _v = arg.pop('familysrc', None) self['familysrc'] = (familysrc if (familysrc is not None) else _v) _v = arg.pop('size', None) self['size'] = (size if (size is not None) else _v) _v = arg.pop('sizesrc', None) self['sizesrc'] = (sizesrc if (sizesrc is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
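A brief sketch of passing a Textfont to a scattergeo trace, again with made-up data; the comma-separated family string supplies browser fallbacks as described above:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[10, 20], lat=[30, 40],  # made-up coordinates
    mode='markers+text',
    text=['A', 'B'],
    textfont=go.scattergeo.Textfont(
        family='Open Sans, Arial',  # first available family wins
        size=14,
        color='darkblue',
    ),
)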
@property def maxpoints(self): "\n Sets the maximum number of points to keep on the plots from an\n incoming stream. If `maxpoints` is set to 50, only the newest\n 50 points will be displayed on the plot.\n \n The 'maxpoints' property is a number and may be specified as:\n - An int or float in the interval [0, 10000]\n\n Returns\n -------\n int|float\n " return self['maxpoints']
6,188,310,185,754,620,000
Sets the maximum number of points to keep on the plots from an incoming stream. If `maxpoints` is set to 50, only the newest 50 points will be displayed on the plot. The 'maxpoints' property is a number and may be specified as: - An int or float in the interval [0, 10000] Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
maxpoints
Jonathan-MW/plotly.py
python
@property def maxpoints(self): "\n Sets the maximum number of points to keep on the plots from an\n incoming stream. If `maxpoints` is set to 50, only the newest\n 50 points will be displayed on the plot.\n \n The 'maxpoints' property is a number and may be specified as:\n - An int or float in the interval [0, 10000]\n\n Returns\n -------\n int|float\n " return self['maxpoints']
@property def token(self): "\n The stream id number links a data trace on a plot with a\n stream. See https://plot.ly/settings for more details.\n \n The 'token' property is a string and must be specified as:\n - A non-empty string\n\n Returns\n -------\n str\n " return self['token']
-6,778,178,393,472,647,000
The stream id number links a data trace on a plot with a stream. See https://plot.ly/settings for more details. The 'token' property is a string and must be specified as: - A non-empty string Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
token
Jonathan-MW/plotly.py
python
@property def token(self): "\n The stream id number links a data trace on a plot with a\n stream. See https://plot.ly/settings for more details.\n \n The 'token' property is a string and must be specified as:\n - A non-empty string\n\n Returns\n -------\n str\n " return self['token']
def __init__(self, arg=None, maxpoints=None, token=None, **kwargs): '\n Construct a new Stream object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Stream\n maxpoints\n Sets the maximum number of points to keep on the plots\n from an incoming stream. If `maxpoints` is set to 50,\n only the newest 50 points will be displayed on the\n plot.\n token\n The stream id number links a data trace on a plot with\n a stream. See https://plot.ly/settings for more\n details.\n\n Returns\n -------\n Stream\n ' super(Stream, self).__init__('stream') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Stream \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Stream') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import stream as v_stream self._validators['maxpoints'] = v_stream.MaxpointsValidator() self._validators['token'] = v_stream.TokenValidator() _v = arg.pop('maxpoints', None) self['maxpoints'] = (maxpoints if (maxpoints is not None) else _v) _v = arg.pop('token', None) self['token'] = (token if (token is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
7,141,075,823,921,744,000
Construct a new Stream object Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Stream maxpoints Sets the maximum number of points to keep on the plots from an incoming stream. If `maxpoints` is set to 50, only the newest 50 points will be displayed on the plot. token The stream id number links a data trace on a plot with a stream. See https://plot.ly/settings for more details. Returns ------- Stream
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, maxpoints=None, token=None, **kwargs): '\n Construct a new Stream object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Stream\n maxpoints\n Sets the maximum number of points to keep on the plots\n from an incoming stream. If `maxpoints` is set to 50,\n only the newest 50 points will be displayed on the\n plot.\n token\n The stream id number links a data trace on a plot with\n a stream. See https://plot.ly/settings for more\n details.\n\n Returns\n -------\n Stream\n ' super(Stream, self).__init__('stream') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Stream \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Stream') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import stream as v_stream self._validators['maxpoints'] = v_stream.MaxpointsValidator() self._validators['token'] = v_stream.TokenValidator() _v = arg.pop('maxpoints', None) self['maxpoints'] = (maxpoints if (maxpoints is not None) else _v) _v = arg.pop('token', None) self['token'] = (token if (token is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
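A hedged sketch of wiring a Stream into a trace; the token value is a placeholder (real tokens come from the plot.ly settings page) and is not taken from the record above:

import plotly.graph_objs as go

stream = go.scattergeo.Stream(
    token='YOUR_STREAM_TOKEN',  # placeholder, obtain a real token from https://plot.ly/settings
    maxpoints=50,               # keep only the newest 50 points on the plot
)
trace = go.Scattergeo(lon=[], lat=[], mode='markers', stream=stream)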
@property def marker(self): "\n The 'marker' property is an instance of Marker\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.selected.Marker\n - A dict of string/value properties that will be passed\n to the Marker constructor\n \n Supported dict properties:\n \n color\n Sets the marker color of selected points.\n opacity\n Sets the marker opacity of selected points.\n size\n Sets the marker size of selected points.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.selected.Marker\n " return self['marker']
7,421,703,061,020,846,000
The 'marker' property is an instance of Marker that may be specified as: - An instance of plotly.graph_objs.scattergeo.selected.Marker - A dict of string/value properties that will be passed to the Marker constructor Supported dict properties: color Sets the marker color of selected points. opacity Sets the marker opacity of selected points. size Sets the marker size of selected points. Returns ------- plotly.graph_objs.scattergeo.selected.Marker
plotly/graph_objs/scattergeo/__init__.py
marker
Jonathan-MW/plotly.py
python
@property def marker(self): "\n The 'marker' property is an instance of Marker\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.selected.Marker\n - A dict of string/value properties that will be passed\n to the Marker constructor\n \n Supported dict properties:\n \n color\n Sets the marker color of selected points.\n opacity\n Sets the marker opacity of selected points.\n size\n Sets the marker size of selected points.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.selected.Marker\n " return self['marker']
@property def textfont(self): "\n The 'textfont' property is an instance of Textfont\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.selected.Textfont\n - A dict of string/value properties that will be passed\n to the Textfont constructor\n \n Supported dict properties:\n \n color\n Sets the text font color of selected points.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.selected.Textfont\n " return self['textfont']
6,163,905,276,933,022,000
The 'textfont' property is an instance of Textfont that may be specified as: - An instance of plotly.graph_objs.scattergeo.selected.Textfont - A dict of string/value properties that will be passed to the Textfont constructor Supported dict properties: color Sets the text font color of selected points. Returns ------- plotly.graph_objs.scattergeo.selected.Textfont
plotly/graph_objs/scattergeo/__init__.py
textfont
Jonathan-MW/plotly.py
python
@property def textfont(self): "\n The 'textfont' property is an instance of Textfont\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.selected.Textfont\n - A dict of string/value properties that will be passed\n to the Textfont constructor\n \n Supported dict properties:\n \n color\n Sets the text font color of selected points.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.selected.Textfont\n " return self['textfont']
def __init__(self, arg=None, marker=None, textfont=None, **kwargs): '\n Construct a new Selected object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Selected\n marker\n plotly.graph_objs.scattergeo.selected.Marker instance\n or dict with compatible properties\n textfont\n plotly.graph_objs.scattergeo.selected.Textfont instance\n or dict with compatible properties\n\n Returns\n -------\n Selected\n ' super(Selected, self).__init__('selected') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Selected \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Selected') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import selected as v_selected self._validators['marker'] = v_selected.MarkerValidator() self._validators['textfont'] = v_selected.TextfontValidator() _v = arg.pop('marker', None) self['marker'] = (marker if (marker is not None) else _v) _v = arg.pop('textfont', None) self['textfont'] = (textfont if (textfont is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
189,164,675,801,328,930
Construct a new Selected object Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Selected marker plotly.graph_objs.scattergeo.selected.Marker instance or dict with compatible properties textfont plotly.graph_objs.scattergeo.selected.Textfont instance or dict with compatible properties Returns ------- Selected
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, marker=None, textfont=None, **kwargs): '\n Construct a new Selected object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Selected\n marker\n plotly.graph_objs.scattergeo.selected.Marker instance\n or dict with compatible properties\n textfont\n plotly.graph_objs.scattergeo.selected.Textfont instance\n or dict with compatible properties\n\n Returns\n -------\n Selected\n ' super(Selected, self).__init__('selected') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Selected \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Selected') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import selected as v_selected self._validators['marker'] = v_selected.MarkerValidator() self._validators['textfont'] = v_selected.TextfontValidator() _v = arg.pop('marker', None) self['marker'] = (marker if (marker is not None) else _v) _v = arg.pop('textfont', None) self['textfont'] = (textfont if (textfont is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
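A sketch combining Selected and Unselected on one trace, with made-up data; enabling click selection via layout.clickmode is an assumption about the plotly version in use:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[0, 10, 20], lat=[0, 10, 20],  # made-up coordinates
    mode='markers',
    selected=go.scattergeo.Selected(marker=dict(color='crimson', size=10)),
    unselected=go.scattergeo.Unselected(marker=dict(opacity=0.2)),
)
fig = go.Figure(data=[trace], layout=go.Layout(clickmode='event+select'))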
@property def autocolorscale(self): "\n Determines whether the colorscale is a default palette\n (`autocolorscale: true`) or the palette determined by\n `marker.colorscale`. Has an effect only if in `marker.color`is\n set to a numerical array. In case `colorscale` is unspecified\n or `autocolorscale` is true, the default palette will be\n chosen according to whether numbers in the `color` array are\n all positive, all negative or mixed.\n \n The 'autocolorscale' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['autocolorscale']
8,397,201,591,515,050,000
Determines whether the colorscale is a default palette (`autocolorscale: true`) or the palette determined by `marker.colorscale`. Has an effect only if in `marker.color`is set to a numerical array. In case `colorscale` is unspecified or `autocolorscale` is true, the default palette will be chosen according to whether numbers in the `color` array are all positive, all negative or mixed. The 'autocolorscale' property must be specified as a bool (either True, or False) Returns ------- bool
plotly/graph_objs/scattergeo/__init__.py
autocolorscale
Jonathan-MW/plotly.py
python
@property def autocolorscale(self): "\n Determines whether the colorscale is a default palette\n (`autocolorscale: true`) or the palette determined by\n `marker.colorscale`. Has an effect only if in `marker.color`is\n set to a numerical array. In case `colorscale` is unspecified\n or `autocolorscale` is true, the default palette will be\n chosen according to whether numbers in the `color` array are\n all positive, all negative or mixed.\n \n The 'autocolorscale' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['autocolorscale']
@property def cauto(self): "\n Determines whether or not the color domain is computed with\n respect to the input data (here in `marker.color`) or the\n bounds set in `marker.cmin` and `marker.cmax` Has an effect\n only if in `marker.color`is set to a numerical array. Defaults\n to `false` when `marker.cmin` and `marker.cmax` are set by the\n user.\n \n The 'cauto' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['cauto']
-7,671,358,552,002,983,000
Determines whether or not the color domain is computed with respect to the input data (here in `marker.color`) or the bounds set in `marker.cmin` and `marker.cmax` Has an effect only if in `marker.color`is set to a numerical array. Defaults to `false` when `marker.cmin` and `marker.cmax` are set by the user. The 'cauto' property must be specified as a bool (either True, or False) Returns ------- bool
plotly/graph_objs/scattergeo/__init__.py
cauto
Jonathan-MW/plotly.py
python
@property def cauto(self): "\n Determines whether or not the color domain is computed with\n respect to the input data (here in `marker.color`) or the\n bounds set in `marker.cmin` and `marker.cmax` Has an effect\n only if in `marker.color`is set to a numerical array. Defaults\n to `false` when `marker.cmin` and `marker.cmax` are set by the\n user.\n \n The 'cauto' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['cauto']
@property def cmax(self): "\n Sets the upper bound of the color domain. Has an effect only if\n in `marker.color`is set to a numerical array. Value should have\n the same units as in `marker.color` and if set, `marker.cmin`\n must be set as well.\n \n The 'cmax' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['cmax']
4,402,550,313,397,268,500
Sets the upper bound of the color domain. Has an effect only if in `marker.color`is set to a numerical array. Value should have the same units as in `marker.color` and if set, `marker.cmin` must be set as well. The 'cmax' property is a number and may be specified as: - An int or float Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
cmax
Jonathan-MW/plotly.py
python
@property def cmax(self): "\n Sets the upper bound of the color domain. Has an effect only if\n in `marker.color`is set to a numerical array. Value should have\n the same units as in `marker.color` and if set, `marker.cmin`\n must be set as well.\n \n The 'cmax' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['cmax']
@property def cmid(self): "\n Sets the mid-point of the color domain by scaling `marker.cmin`\n and/or `marker.cmax` to be equidistant to this point. Has an\n effect only if in `marker.color`is set to a numerical array.\n Value should have the same units as in `marker.color`. Has no\n effect when `marker.cauto` is `false`.\n \n The 'cmid' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['cmid']
-6,879,592,299,899,489,000
Sets the mid-point of the color domain by scaling `marker.cmin` and/or `marker.cmax` to be equidistant to this point. Has an effect only if in `marker.color`is set to a numerical array. Value should have the same units as in `marker.color`. Has no effect when `marker.cauto` is `false`. The 'cmid' property is a number and may be specified as: - An int or float Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
cmid
Jonathan-MW/plotly.py
python
@property def cmid(self): "\n Sets the mid-point of the color domain by scaling `marker.cmin`\n and/or `marker.cmax` to be equidistant to this point. Has an\n effect only if in `marker.color`is set to a numerical array.\n Value should have the same units as in `marker.color`. Has no\n effect when `marker.cauto` is `false`.\n \n The 'cmid' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['cmid']
@property def cmin(self): "\n Sets the lower bound of the color domain. Has an effect only if\n in `marker.color`is set to a numerical array. Value should have\n the same units as in `marker.color` and if set, `marker.cmax`\n must be set as well.\n \n The 'cmin' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['cmin']
7,058,444,409,272,556,000
Sets the lower bound of the color domain. Has an effect only if in `marker.color`is set to a numerical array. Value should have the same units as in `marker.color` and if set, `marker.cmax` must be set as well. The 'cmin' property is a number and may be specified as: - An int or float Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
cmin
Jonathan-MW/plotly.py
python
@property def cmin(self): "\n Sets the lower bound of the color domain. Has an effect only if\n in `marker.color`is set to a numerical array. Value should have\n the same units as in `marker.color` and if set, `marker.cmax`\n must be set as well.\n \n The 'cmin' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['cmin']
@property def color(self): "\n Sets themarkercolor. It accepts either a specific color or an\n array of numbers that are mapped to the colorscale relative to\n the max and min values of the array or relative to\n `marker.cmin` and `marker.cmax` if set.\n \n The 'color' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A number that will be interpreted as a color\n according to scattergeo.marker.colorscale\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['color']
-4,519,384,853,487,422,500
Sets themarkercolor. It accepts either a specific color or an array of numbers that are mapped to the colorscale relative to the max and min values of the array or relative to `marker.cmin` and `marker.cmax` if set. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A number that will be interpreted as a color according to scattergeo.marker.colorscale - A list or array of any of the above Returns ------- str|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
color
Jonathan-MW/plotly.py
python
@property def color(self): "\n Sets themarkercolor. It accepts either a specific color or an\n array of numbers that are mapped to the colorscale relative to\n the max and min values of the array or relative to\n `marker.cmin` and `marker.cmax` if set.\n \n The 'color' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A number that will be interpreted as a color\n according to scattergeo.marker.colorscale\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['color']
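A sketch of the numeric-array form of marker.color together with the cmin/cmax bounds documented above; the values and coordinates are invented:

import plotly.graph_objs as go

values = [2.5, 7.1, 4.3, 9.8]  # made-up numbers mapped onto the colorscale
trace = go.Scattergeo(
    lon=[-100, -90, -80, -70], lat=[40, 41, 42, 43],
    mode='markers',
    marker=dict(
        color=values,      # numeric array: colors come from the colorscale
        cmin=0, cmax=10,   # fix the color domain instead of letting cauto choose it
        colorscale='Viridis',
        showscale=True,    # draw the color bar
    ),
)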
@property def coloraxis(self): '\n Sets a reference to a shared color axis. References to these\n shared color axes are "coloraxis", "coloraxis2", "coloraxis3",\n etc. Settings for these shared color axes are set in the\n layout, under `layout.coloraxis`, `layout.coloraxis2`, etc.\n Note that multiple color scales can be linked to the same color\n axis.\n \n The \'coloraxis\' property is an identifier of a particular\n subplot, of type \'coloraxis\', that may be specified as the string \'coloraxis\'\n optionally followed by an integer >= 1\n (e.g. \'coloraxis\', \'coloraxis1\', \'coloraxis2\', \'coloraxis3\', etc.)\n\n Returns\n -------\n str\n ' return self['coloraxis']
1,167,068,444,940,009,200
Sets a reference to a shared color axis. References to these shared color axes are "coloraxis", "coloraxis2", "coloraxis3", etc. Settings for these shared color axes are set in the layout, under `layout.coloraxis`, `layout.coloraxis2`, etc. Note that multiple color scales can be linked to the same color axis. The 'coloraxis' property is an identifier of a particular subplot, of type 'coloraxis', that may be specified as the string 'coloraxis' optionally followed by an integer >= 1 (e.g. 'coloraxis', 'coloraxis1', 'coloraxis2', 'coloraxis3', etc.) Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
coloraxis
Jonathan-MW/plotly.py
python
@property def coloraxis(self): '\n Sets a reference to a shared color axis. References to these\n shared color axes are "coloraxis", "coloraxis2", "coloraxis3",\n etc. Settings for these shared color axes are set in the\n layout, under `layout.coloraxis`, `layout.coloraxis2`, etc.\n Note that multiple color scales can be linked to the same color\n axis.\n \n The \'coloraxis\' property is an identifier of a particular\n subplot, of type \'coloraxis\', that may be specified as the string \'coloraxis\'\n optionally followed by an integer >= 1\n (e.g. \'coloraxis\', \'coloraxis1\', \'coloraxis2\', \'coloraxis3\', etc.)\n\n Returns\n -------\n str\n ' return self['coloraxis']
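A sketch of two traces sharing one color mapping through a layout-level color axis, assuming a plotly version that supports coloraxis (as the docstring above implies); all data are invented:

import plotly.graph_objs as go

# Both traces point at the same shared color axis defined in the layout.
trace_a = go.Scattergeo(lon=[0, 10], lat=[0, 10], mode='markers',
                        marker=dict(color=[1, 5], coloraxis='coloraxis'))
trace_b = go.Scattergeo(lon=[20, 30], lat=[20, 30], mode='markers',
                        marker=dict(color=[3, 9], coloraxis='coloraxis'))
layout = go.Layout(coloraxis=dict(colorscale='Plasma', cmin=0, cmax=10))
fig = go.Figure(data=[trace_a, trace_b], layout=layout)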
@property def colorbar(self): '\n The \'colorbar\' property is an instance of ColorBar\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.marker.ColorBar\n - A dict of string/value properties that will be passed\n to the ColorBar constructor\n \n Supported dict properties:\n \n bgcolor\n Sets the color of padded area.\n bordercolor\n Sets the axis line color.\n borderwidth\n Sets the width (in px) or the border enclosing\n this color bar.\n dtick\n Sets the step in-between ticks on this axis.\n Use with `tick0`. Must be a positive number, or\n special strings available to "log" and "date"\n axes. If the axis `type` is "log", then ticks\n are set every 10^(n*dtick) where n is the tick\n number. For example, to set a tick mark at 1,\n 10, 100, 1000, ... set dtick to 1. To set tick\n marks at 1, 100, 10000, ... set dtick to 2. To\n set tick marks at 1, 5, 25, 125, 625, 3125, ...\n set dtick to log_10(5), or 0.69897000433. "log"\n has several special values; "L<f>", where `f`\n is a positive number, gives ticks linearly\n spaced in value (but not position). For example\n `tick0` = 0.1, `dtick` = "L0.5" will put ticks\n at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10\n plus small digits between, use "D1" (all\n digits) or "D2" (only 2 and 5). `tick0` is\n ignored for "D1" and "D2". If the axis `type`\n is "date", then you must convert the time to\n milliseconds. For example, to set the interval\n between ticks to one day, set `dtick` to\n 86400000.0. "date" also has special values\n "M<n>" gives ticks spaced by a number of\n months. `n` must be a positive integer. To set\n ticks on the 15th of every third month, set\n `tick0` to "2000-01-15" and `dtick` to "M3". To\n set ticks every 4 years, set `dtick` to "M48"\n exponentformat\n Determines a formatting rule for the tick\n exponents. For example, consider the number\n 1,000,000,000. If "none", it appears as\n 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If\n "power", 1x10^9 (with 9 in a super script). If\n "SI", 1G. If "B", 1B.\n len\n Sets the length of the color bar This measure\n excludes the padding of both ends. That is, the\n color bar length is this length minus the\n padding on both ends.\n lenmode\n Determines whether this color bar\'s length\n (i.e. the measure in the color variation\n direction) is set in units of plot "fraction"\n or in *pixels. Use `len` to set the value.\n nticks\n Specifies the maximum number of ticks for the\n particular axis. The actual number of ticks\n will be chosen automatically to be less than or\n equal to `nticks`. Has an effect only if\n `tickmode` is set to "auto".\n outlinecolor\n Sets the axis line color.\n outlinewidth\n Sets the width (in px) of the axis line.\n separatethousands\n If "true", even 4-digit integers are separated\n showexponent\n If "all", all exponents are shown besides their\n significands. If "first", only the exponent of\n the first tick is shown. If "last", only the\n exponent of the last tick is shown. If "none",\n no exponents appear.\n showticklabels\n Determines whether or not the tick labels are\n drawn.\n showtickprefix\n If "all", all tick labels are displayed with a\n prefix. If "first", only the first tick is\n displayed with a prefix. If "last", only the\n last tick is displayed with a suffix. If\n "none", tick prefixes are hidden.\n showticksuffix\n Same as `showtickprefix` but for tick suffixes.\n thickness\n Sets the thickness of the color bar This\n measure excludes the size of the padding, ticks\n and labels.\n thicknessmode\n Determines whether this color bar\'s thickness\n (i.e. the measure in the constant color\n direction) is set in units of plot "fraction"\n or in "pixels". Use `thickness` to set the\n value.\n tick0\n Sets the placement of the first tick on this\n axis. Use with `dtick`. If the axis `type` is\n "log", then you must take the log of your\n starting tick (e.g. to set the starting tick to\n 100, set the `tick0` to 2) except when\n `dtick`=*L<f>* (see `dtick` for more info). If\n the axis `type` is "date", it should be a date\n string, like date data. If the axis `type` is\n "category", it should be a number, using the\n scale where each category is assigned a serial\n number from zero in the order it appears.\n tickangle\n Sets the angle of the tick labels with respect\n to the horizontal. For example, a `tickangle`\n of -90 draws the tick labels vertically.\n tickcolor\n Sets the tick color.\n tickfont\n Sets the color bar\'s tick label font\n tickformat\n Sets the tick label formatting rule using d3\n formatting mini-languages which are very\n similar to those in Python. For numbers, see: h\n ttps://github.com/d3/d3-format/blob/master/READ\n ME.md#locale_format And for dates see:\n https://github.com/d3/d3-time-\n format/blob/master/README.md#locale_format We\n add one item to d3\'s date formatter: "%{n}f"\n for fractional seconds with n digits. For\n example, *2016-10-13 09:15:23.456* with\n tickformat "%H~%M~%S.%2f" would display\n "09~15~23.46"\n tickformatstops\n plotly.graph_objs.scattergeo.marker.colorbar.Ti\n ckformatstop instance or dict with compatible\n properties\n tickformatstopdefaults\n When used in a template (as layout.template.dat\n a.scattergeo.marker.colorbar.tickformatstopdefa\n ults), sets the default property values to use\n for elements of\n scattergeo.marker.colorbar.tickformatstops\n ticklen\n Sets the tick length (in px).\n tickmode\n Sets the tick mode for this axis. If "auto",\n the number of ticks is set via `nticks`. If\n "linear", the placement of the ticks is\n determined by a starting position `tick0` and a\n tick step `dtick` ("linear" is the default\n value if `tick0` and `dtick` are provided). If\n "array", the placement of the ticks is set via\n `tickvals` and the tick text is `ticktext`.\n ("array" is the default value if `tickvals` is\n provided).\n tickprefix\n Sets a tick label prefix.\n ticks\n Determines whether ticks are drawn or not. If\n "", this axis\' ticks are not drawn. If\n "outside" ("inside"), this axis\' are drawn\n outside (inside) the axis lines.\n ticksuffix\n Sets a tick label suffix.\n ticktext\n Sets the text displayed at the ticks position\n via `tickvals`. Only has an effect if\n `tickmode` is set to "array". Used with\n `tickvals`.\n ticktextsrc\n Sets the source reference on plot.ly for\n ticktext .\n tickvals\n Sets the values at which ticks on this axis\n appear. Only has an effect if `tickmode` is set\n to "array". Used with `ticktext`.\n tickvalssrc\n Sets the source reference on plot.ly for\n tickvals .\n tickwidth\n Sets the tick width (in px).\n title\n plotly.graph_objs.scattergeo.marker.colorbar.Ti\n tle instance or dict with compatible properties\n titlefont\n Deprecated: Please use\n scattergeo.marker.colorbar.title.font instead.\n Sets this color bar\'s title font. Note that the\n title\'s font used to be set by the now\n deprecated `titlefont` attribute.\n titleside\n Deprecated: Please use\n scattergeo.marker.colorbar.title.side instead.\n Determines the location of color bar\'s title\n with respect to the color bar. Note that the\n title\'s location used to be set by the now\n deprecated `titleside` attribute.\n x\n Sets the x position of the color bar (in plot\n fraction).\n xanchor\n Sets this color bar\'s horizontal position\n anchor. This anchor binds the `x` position to\n the "left", "center" or "right" of the color\n bar.\n xpad\n Sets the amount of padding (in px) along the x\n direction.\n y\n Sets the y position of the color bar (in plot\n fraction).\n yanchor\n Sets this color bar\'s vertical position anchor\n This anchor binds the `y` position to the\n "top", "middle" or "bottom" of the color bar.\n ypad\n Sets the amount of padding (in px) along the y\n direction.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.marker.ColorBar\n ' return self['colorbar']
-2,189,857,872,392,623,900
The 'colorbar' property is an instance of ColorBar that may be specified as: - An instance of plotly.graph_objs.scattergeo.marker.ColorBar - A dict of string/value properties that will be passed to the ColorBar constructor Supported dict properties: bgcolor Sets the color of padded area. bordercolor Sets the axis line color. borderwidth Sets the width (in px) or the border enclosing this color bar. dtick Sets the step in-between ticks on this axis. Use with `tick0`. Must be a positive number, or special strings available to "log" and "date" axes. If the axis `type` is "log", then ticks are set every 10^(n*dtick) where n is the tick number. For example, to set a tick mark at 1, 10, 100, 1000, ... set dtick to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2. To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to log_10(5), or 0.69897000433. "log" has several special values; "L<f>", where `f` is a positive number, gives ticks linearly spaced in value (but not position). For example `tick0` = 0.1, `dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus small digits between, use "D1" (all digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and "D2". If the axis `type` is "date", then you must convert the time to milliseconds. For example, to set the interval between ticks to one day, set `dtick` to 86400000.0. "date" also has special values "M<n>" gives ticks spaced by a number of months. `n` must be a positive integer. To set ticks on the 15th of every third month, set `tick0` to "2000-01-15" and `dtick` to "M3". To set ticks every 4 years, set `dtick` to "M48" exponentformat Determines a formatting rule for the tick exponents. For example, consider the number 1,000,000,000. If "none", it appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If "power", 1x10^9 (with 9 in a super script). If "SI", 1G. If "B", 1B. len Sets the length of the color bar This measure excludes the padding of both ends. That is, the color bar length is this length minus the padding on both ends. lenmode Determines whether this color bar's length (i.e. the measure in the color variation direction) is set in units of plot "fraction" or in *pixels. Use `len` to set the value. nticks Specifies the maximum number of ticks for the particular axis. The actual number of ticks will be chosen automatically to be less than or equal to `nticks`. Has an effect only if `tickmode` is set to "auto". outlinecolor Sets the axis line color. outlinewidth Sets the width (in px) of the axis line. separatethousands If "true", even 4-digit integers are separated showexponent If "all", all exponents are shown besides their significands. If "first", only the exponent of the first tick is shown. If "last", only the exponent of the last tick is shown. If "none", no exponents appear. showticklabels Determines whether or not the tick labels are drawn. showtickprefix If "all", all tick labels are displayed with a prefix. If "first", only the first tick is displayed with a prefix. If "last", only the last tick is displayed with a suffix. If "none", tick prefixes are hidden. showticksuffix Same as `showtickprefix` but for tick suffixes. thickness Sets the thickness of the color bar This measure excludes the size of the padding, ticks and labels. thicknessmode Determines whether this color bar's thickness (i.e. the measure in the constant color direction) is set in units of plot "fraction" or in "pixels". Use `thickness` to set the value. tick0 Sets the placement of the first tick on this axis. 
Use with `dtick`. If the axis `type` is "log", then you must take the log of your starting tick (e.g. to set the starting tick to 100, set the `tick0` to 2) except when `dtick`=*L<f>* (see `dtick` for more info). If the axis `type` is "date", it should be a date string, like date data. If the axis `type` is "category", it should be a number, using the scale where each category is assigned a serial number from zero in the order it appears. tickangle Sets the angle of the tick labels with respect to the horizontal. For example, a `tickangle` of -90 draws the tick labels vertically. tickcolor Sets the tick color. tickfont Sets the color bar's tick label font tickformat Sets the tick label formatting rule using d3 formatting mini-languages which are very similar to those in Python. For numbers, see: h ttps://github.com/d3/d3-format/blob/master/READ ME.md#locale_format And for dates see: https://github.com/d3/d3-time- format/blob/master/README.md#locale_format We add one item to d3's date formatter: "%{n}f" for fractional seconds with n digits. For example, *2016-10-13 09:15:23.456* with tickformat "%H~%M~%S.%2f" would display "09~15~23.46" tickformatstops plotly.graph_objs.scattergeo.marker.colorbar.Ti ckformatstop instance or dict with compatible properties tickformatstopdefaults When used in a template (as layout.template.dat a.scattergeo.marker.colorbar.tickformatstopdefa ults), sets the default property values to use for elements of scattergeo.marker.colorbar.tickformatstops ticklen Sets the tick length (in px). tickmode Sets the tick mode for this axis. If "auto", the number of ticks is set via `nticks`. If "linear", the placement of the ticks is determined by a starting position `tick0` and a tick step `dtick` ("linear" is the default value if `tick0` and `dtick` are provided). If "array", the placement of the ticks is set via `tickvals` and the tick text is `ticktext`. ("array" is the default value if `tickvals` is provided). tickprefix Sets a tick label prefix. ticks Determines whether ticks are drawn or not. If "", this axis' ticks are not drawn. If "outside" ("inside"), this axis' are drawn outside (inside) the axis lines. ticksuffix Sets a tick label suffix. ticktext Sets the text displayed at the ticks position via `tickvals`. Only has an effect if `tickmode` is set to "array". Used with `tickvals`. ticktextsrc Sets the source reference on plot.ly for ticktext . tickvals Sets the values at which ticks on this axis appear. Only has an effect if `tickmode` is set to "array". Used with `ticktext`. tickvalssrc Sets the source reference on plot.ly for tickvals . tickwidth Sets the tick width (in px). title plotly.graph_objs.scattergeo.marker.colorbar.Ti tle instance or dict with compatible properties titlefont Deprecated: Please use scattergeo.marker.colorbar.title.font instead. Sets this color bar's title font. Note that the title's font used to be set by the now deprecated `titlefont` attribute. titleside Deprecated: Please use scattergeo.marker.colorbar.title.side instead. Determines the location of color bar's title with respect to the color bar. Note that the title's location used to be set by the now deprecated `titleside` attribute. x Sets the x position of the color bar (in plot fraction). xanchor Sets this color bar's horizontal position anchor. This anchor binds the `x` position to the "left", "center" or "right" of the color bar. xpad Sets the amount of padding (in px) along the x direction. y Sets the y position of the color bar (in plot fraction). 
yanchor Sets this color bar's vertical position anchor This anchor binds the `y` position to the "top", "middle" or "bottom" of the color bar. ypad Sets the amount of padding (in px) along the y direction. Returns ------- plotly.graph_objs.scattergeo.marker.ColorBar
plotly/graph_objs/scattergeo/__init__.py
colorbar
Jonathan-MW/plotly.py
python
@property def colorbar(self): '\n The \'colorbar\' property is an instance of ColorBar\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.marker.ColorBar\n - A dict of string/value properties that will be passed\n to the ColorBar constructor\n \n Supported dict properties:\n \n bgcolor\n Sets the color of padded area.\n bordercolor\n Sets the axis line color.\n borderwidth\n Sets the width (in px) or the border enclosing\n this color bar.\n dtick\n Sets the step in-between ticks on this axis.\n Use with `tick0`. Must be a positive number, or\n special strings available to "log" and "date"\n axes. If the axis `type` is "log", then ticks\n are set every 10^(n*dtick) where n is the tick\n number. For example, to set a tick mark at 1,\n 10, 100, 1000, ... set dtick to 1. To set tick\n marks at 1, 100, 10000, ... set dtick to 2. To\n set tick marks at 1, 5, 25, 125, 625, 3125, ...\n set dtick to log_10(5), or 0.69897000433. "log"\n has several special values; "L<f>", where `f`\n is a positive number, gives ticks linearly\n spaced in value (but not position). For example\n `tick0` = 0.1, `dtick` = "L0.5" will put ticks\n at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10\n plus small digits between, use "D1" (all\n digits) or "D2" (only 2 and 5). `tick0` is\n ignored for "D1" and "D2". If the axis `type`\n is "date", then you must convert the time to\n milliseconds. For example, to set the interval\n between ticks to one day, set `dtick` to\n 86400000.0. "date" also has special values\n "M<n>" gives ticks spaced by a number of\n months. `n` must be a positive integer. To set\n ticks on the 15th of every third month, set\n `tick0` to "2000-01-15" and `dtick` to "M3". To\n set ticks every 4 years, set `dtick` to "M48"\n exponentformat\n Determines a formatting rule for the tick\n exponents. For example, consider the number\n 1,000,000,000. If "none", it appears as\n 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If\n "power", 1x10^9 (with 9 in a super script). If\n "SI", 1G. If "B", 1B.\n len\n Sets the length of the color bar This measure\n excludes the padding of both ends. That is, the\n color bar length is this length minus the\n padding on both ends.\n lenmode\n Determines whether this color bar\'s length\n (i.e. the measure in the color variation\n direction) is set in units of plot "fraction"\n or in *pixels. Use `len` to set the value.\n nticks\n Specifies the maximum number of ticks for the\n particular axis. The actual number of ticks\n will be chosen automatically to be less than or\n equal to `nticks`. Has an effect only if\n `tickmode` is set to "auto".\n outlinecolor\n Sets the axis line color.\n outlinewidth\n Sets the width (in px) of the axis line.\n separatethousands\n If "true", even 4-digit integers are separated\n showexponent\n If "all", all exponents are shown besides their\n significands. If "first", only the exponent of\n the first tick is shown. If "last", only the\n exponent of the last tick is shown. If "none",\n no exponents appear.\n showticklabels\n Determines whether or not the tick labels are\n drawn.\n showtickprefix\n If "all", all tick labels are displayed with a\n prefix. If "first", only the first tick is\n displayed with a prefix. If "last", only the\n last tick is displayed with a suffix. 
If\n "none", tick prefixes are hidden.\n showticksuffix\n Same as `showtickprefix` but for tick suffixes.\n thickness\n Sets the thickness of the color bar This\n measure excludes the size of the padding, ticks\n and labels.\n thicknessmode\n Determines whether this color bar\'s thickness\n (i.e. the measure in the constant color\n direction) is set in units of plot "fraction"\n or in "pixels". Use `thickness` to set the\n value.\n tick0\n Sets the placement of the first tick on this\n axis. Use with `dtick`. If the axis `type` is\n "log", then you must take the log of your\n starting tick (e.g. to set the starting tick to\n 100, set the `tick0` to 2) except when\n `dtick`=*L<f>* (see `dtick` for more info). If\n the axis `type` is "date", it should be a date\n string, like date data. If the axis `type` is\n "category", it should be a number, using the\n scale where each category is assigned a serial\n number from zero in the order it appears.\n tickangle\n Sets the angle of the tick labels with respect\n to the horizontal. For example, a `tickangle`\n of -90 draws the tick labels vertically.\n tickcolor\n Sets the tick color.\n tickfont\n Sets the color bar\'s tick label font\n tickformat\n Sets the tick label formatting rule using d3\n formatting mini-languages which are very\n similar to those in Python. For numbers, see: h\n ttps://github.com/d3/d3-format/blob/master/READ\n ME.md#locale_format And for dates see:\n https://github.com/d3/d3-time-\n format/blob/master/README.md#locale_format We\n add one item to d3\'s date formatter: "%{n}f"\n for fractional seconds with n digits. For\n example, *2016-10-13 09:15:23.456* with\n tickformat "%H~%M~%S.%2f" would display\n "09~15~23.46"\n tickformatstops\n plotly.graph_objs.scattergeo.marker.colorbar.Ti\n ckformatstop instance or dict with compatible\n properties\n tickformatstopdefaults\n When used in a template (as layout.template.dat\n a.scattergeo.marker.colorbar.tickformatstopdefa\n ults), sets the default property values to use\n for elements of\n scattergeo.marker.colorbar.tickformatstops\n ticklen\n Sets the tick length (in px).\n tickmode\n Sets the tick mode for this axis. If "auto",\n the number of ticks is set via `nticks`. If\n "linear", the placement of the ticks is\n determined by a starting position `tick0` and a\n tick step `dtick` ("linear" is the default\n value if `tick0` and `dtick` are provided). If\n "array", the placement of the ticks is set via\n `tickvals` and the tick text is `ticktext`.\n ("array" is the default value if `tickvals` is\n provided).\n tickprefix\n Sets a tick label prefix.\n ticks\n Determines whether ticks are drawn or not. If\n , this axis\' ticks are not drawn. If\n "outside" ("inside"), this axis\' are drawn\n outside (inside) the axis lines.\n ticksuffix\n Sets a tick label suffix.\n ticktext\n Sets the text displayed at the ticks position\n via `tickvals`. Only has an effect if\n `tickmode` is set to "array". Used with\n `tickvals`.\n ticktextsrc\n Sets the source reference on plot.ly for\n ticktext .\n tickvals\n Sets the values at which ticks on this axis\n appear. Only has an effect if `tickmode` is set\n to "array". Used with `ticktext`.\n tickvalssrc\n Sets the source reference on plot.ly for\n tickvals .\n tickwidth\n Sets the tick width (in px).\n title\n plotly.graph_objs.scattergeo.marker.colorbar.Ti\n tle instance or dict with compatible properties\n titlefont\n Deprecated: Please use\n scattergeo.marker.colorbar.title.font instead.\n Sets this color bar\'s title font. 
Note that the\n title\'s font used to be set by the now\n deprecated `titlefont` attribute.\n titleside\n Deprecated: Please use\n scattergeo.marker.colorbar.title.side instead.\n Determines the location of color bar\'s title\n with respect to the color bar. Note that the\n title\'s location used to be set by the now\n deprecated `titleside` attribute.\n x\n Sets the x position of the color bar (in plot\n fraction).\n xanchor\n Sets this color bar\'s horizontal position\n anchor. This anchor binds the `x` position to\n the "left", "center" or "right" of the color\n bar.\n xpad\n Sets the amount of padding (in px) along the x\n direction.\n y\n Sets the y position of the color bar (in plot\n fraction).\n yanchor\n Sets this color bar\'s vertical position anchor\n This anchor binds the `y` position to the\n "top", "middle" or "bottom" of the color bar.\n ypad\n Sets the amount of padding (in px) along the y\n direction.\n\n Returns\n -------\n plotly.graph_objs.scattergeo.marker.ColorBar\n ' return self['colorbar']
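The ColorBar docstring above lists many cosmetic attributes (thickness, length, ticks, title, padding). A short sketch showing a handful of them on a Scattergeo marker; the data and the specific styling choices are arbitrary:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[-0.1, 13.4, 37.6], lat=[51.5, 52.5, 55.8], mode='markers',
    marker=dict(
        color=[1.2, 3.4, 2.1],
        colorscale='Viridis',
        showscale=True,                   # the bar is only drawn when showscale is True
        colorbar=dict(
            title=dict(text='intensity', side='right'),
            thickness=15,                 # bar thickness in px
            len=0.6, lenmode='fraction',  # 60% of the plot height
            tick0=0, dtick=1,             # ticks at 0, 1, 2, ...
            outlinewidth=1,
        ),
    ),
)
fig = go.Figure(data=[trace])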
@property def colorscale(self): "\n Sets the colorscale. Has an effect only if in `marker.color`is\n set to a numerical array. The colorscale must be an array\n containing arrays mapping a normalized value to an rgb, rgba,\n hex, hsl, hsv, or named color string. At minimum, a mapping for\n the lowest (0) and highest (1) values are required. For\n example, `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To\n control the bounds of the colorscale in color space,\n use`marker.cmin` and `marker.cmax`. Alternatively, `colorscale`\n may be a palette name string of the following list: Greys,YlGnB\n u,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,Portland\n ,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividis.\n \n The 'colorscale' property is a colorscale and may be\n specified as:\n - A list of 2-element lists where the first element is the\n normalized color level value (starting at 0 and ending at 1), \n and the second item is a valid color string.\n (e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']])\n - One of the following named colorscales:\n ['Greys', 'YlGnBu', 'Greens', 'YlOrRd', 'Bluered', 'RdBu',\n 'Reds', 'Blues', 'Picnic', 'Rainbow', 'Portland', 'Jet',\n 'Hot', 'Blackbody', 'Earth', 'Electric', 'Viridis', 'Cividis']\n\n Returns\n -------\n str\n " return self['colorscale']
-9,149,784,613,847,466,000
Sets the colorscale. Has an effect only if in `marker.color`is set to a numerical array. The colorscale must be an array containing arrays mapping a normalized value to an rgb, rgba, hex, hsl, hsv, or named color string. At minimum, a mapping for the lowest (0) and highest (1) values are required. For example, `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To control the bounds of the colorscale in color space, use`marker.cmin` and `marker.cmax`. Alternatively, `colorscale` may be a palette name string of the following list: Greys,YlGnB u,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,Portland ,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividis. The 'colorscale' property is a colorscale and may be specified as: - A list of 2-element lists where the first element is the normalized color level value (starting at 0 and ending at 1), and the second item is a valid color string. (e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']]) - One of the following named colorscales: ['Greys', 'YlGnBu', 'Greens', 'YlOrRd', 'Bluered', 'RdBu', 'Reds', 'Blues', 'Picnic', 'Rainbow', 'Portland', 'Jet', 'Hot', 'Blackbody', 'Earth', 'Electric', 'Viridis', 'Cividis'] Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
colorscale
Jonathan-MW/plotly.py
python
@property def colorscale(self): "\n Sets the colorscale. Has an effect only if in `marker.color`is\n set to a numerical array. The colorscale must be an array\n containing arrays mapping a normalized value to an rgb, rgba,\n hex, hsl, hsv, or named color string. At minimum, a mapping for\n the lowest (0) and highest (1) values are required. For\n example, `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To\n control the bounds of the colorscale in color space,\n use`marker.cmin` and `marker.cmax`. Alternatively, `colorscale`\n may be a palette name string of the following list: Greys,YlGnB\n u,Greens,YlOrRd,Bluered,RdBu,Reds,Blues,Picnic,Rainbow,Portland\n ,Jet,Hot,Blackbody,Earth,Electric,Viridis,Cividis.\n \n The 'colorscale' property is a colorscale and may be\n specified as:\n - A list of 2-element lists where the first element is the\n normalized color level value (starting at 0 and ending at 1), \n and the second item is a valid color string.\n (e.g. [[0, 'green'], [0.5, 'red'], [1.0, 'rgb(0, 0, 255)']])\n - One of the following named colorscales:\n ['Greys', 'YlGnBu', 'Greens', 'YlOrRd', 'Bluered', 'RdBu',\n 'Reds', 'Blues', 'Picnic', 'Rainbow', 'Portland', 'Jet',\n 'Hot', 'Blackbody', 'Earth', 'Electric', 'Viridis', 'Cividis']\n\n Returns\n -------\n str\n " return self['colorscale']
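Per the `colorscale` docstring above, the property accepts either a named palette or an explicit list of (normalized level, color) pairs covering at least the 0 and 1 endpoints. A minimal sketch with invented data:

import plotly.graph_objs as go

values = [0.0, 2.5, 5.0, 7.5, 10.0]

# Explicit colorscale: normalized levels paired with color strings.
custom_scale = [[0.0, 'rgb(0,0,255)'], [0.5, 'white'], [1.0, 'rgb(255,0,0)']]

trace = go.Scattergeo(
    lon=[-120, -100, -80, -60, -40], lat=[35, 40, 45, 50, 55], mode='markers',
    marker=dict(
        color=values,
        colorscale=custom_scale,   # or a named scale such as 'Viridis'
        cmin=0, cmax=10,           # pin the bounds of the scale in color space
        showscale=True,
    ),
)
fig = go.Figure(data=[trace])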
@property def colorsrc(self): "\n Sets the source reference on plot.ly for color .\n \n The 'colorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['colorsrc']
-4,192,673,473,912,421,000
Sets the source reference on plot.ly for color . The 'colorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
colorsrc
Jonathan-MW/plotly.py
python
@property def colorsrc(self): "\n Sets the source reference on plot.ly for color .\n \n The 'colorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['colorsrc']
@property def gradient(self): "\n The 'gradient' property is an instance of Gradient\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.marker.Gradient\n - A dict of string/value properties that will be passed\n to the Gradient constructor\n \n Supported dict properties:\n \n color\n Sets the final color of the gradient fill: the\n center color for radial, the right for\n horizontal, or the bottom for vertical.\n colorsrc\n Sets the source reference on plot.ly for color\n .\n type\n Sets the type of gradient used to fill the\n markers\n typesrc\n Sets the source reference on plot.ly for type\n .\n\n Returns\n -------\n plotly.graph_objs.scattergeo.marker.Gradient\n " return self['gradient']
1,551,385,271,055,478,300
The 'gradient' property is an instance of Gradient that may be specified as: - An instance of plotly.graph_objs.scattergeo.marker.Gradient - A dict of string/value properties that will be passed to the Gradient constructor Supported dict properties: color Sets the final color of the gradient fill: the center color for radial, the right for horizontal, or the bottom for vertical. colorsrc Sets the source reference on plot.ly for color . type Sets the type of gradient used to fill the markers typesrc Sets the source reference on plot.ly for type . Returns ------- plotly.graph_objs.scattergeo.marker.Gradient
plotly/graph_objs/scattergeo/__init__.py
gradient
Jonathan-MW/plotly.py
python
@property def gradient(self): "\n The 'gradient' property is an instance of Gradient\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.marker.Gradient\n - A dict of string/value properties that will be passed\n to the Gradient constructor\n \n Supported dict properties:\n \n color\n Sets the final color of the gradient fill: the\n center color for radial, the right for\n horizontal, or the bottom for vertical.\n colorsrc\n Sets the source reference on plot.ly for color\n .\n type\n Sets the type of gradient used to fill the\n markers\n typesrc\n Sets the source reference on plot.ly for type\n .\n\n Returns\n -------\n plotly.graph_objs.scattergeo.marker.Gradient\n " return self['gradient']
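The Gradient docstring above describes a per-marker gradient fill. A small sketch with arbitrary coordinates, using a radial gradient whose center color is given by `gradient.color`:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[10, 20], lat=[45, 50], mode='markers',
    marker=dict(
        size=30,
        color='royalblue',
        # Radial fill: the marker fades from its color at the edge to white at the center.
        gradient=dict(type='radial', color='white'),
    ),
)
fig = go.Figure(data=[trace])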
@property def line(self): '\n The \'line\' property is an instance of Line\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.marker.Line\n - A dict of string/value properties that will be passed\n to the Line constructor\n \n Supported dict properties:\n \n autocolorscale\n Determines whether the colorscale is a default\n palette (`autocolorscale: true`) or the palette\n determined by `marker.line.colorscale`. Has an\n effect only if in `marker.line.color`is set to\n a numerical array. In case `colorscale` is\n unspecified or `autocolorscale` is true, the\n default palette will be chosen according to\n whether numbers in the `color` array are all\n positive, all negative or mixed.\n cauto\n Determines whether or not the color domain is\n computed with respect to the input data (here\n in `marker.line.color`) or the bounds set in\n `marker.line.cmin` and `marker.line.cmax` Has\n an effect only if in `marker.line.color`is set\n to a numerical array. Defaults to `false` when\n `marker.line.cmin` and `marker.line.cmax` are\n set by the user.\n cmax\n Sets the upper bound of the color domain. Has\n an effect only if in `marker.line.color`is set\n to a numerical array. Value should have the\n same units as in `marker.line.color` and if\n set, `marker.line.cmin` must be set as well.\n cmid\n Sets the mid-point of the color domain by\n scaling `marker.line.cmin` and/or\n `marker.line.cmax` to be equidistant to this\n point. Has an effect only if in\n `marker.line.color`is set to a numerical array.\n Value should have the same units as in\n `marker.line.color`. Has no effect when\n `marker.line.cauto` is `false`.\n cmin\n Sets the lower bound of the color domain. Has\n an effect only if in `marker.line.color`is set\n to a numerical array. Value should have the\n same units as in `marker.line.color` and if\n set, `marker.line.cmax` must be set as well.\n color\n Sets themarker.linecolor. It accepts either a\n specific color or an array of numbers that are\n mapped to the colorscale relative to the max\n and min values of the array or relative to\n `marker.line.cmin` and `marker.line.cmax` if\n set.\n coloraxis\n Sets a reference to a shared color axis.\n References to these shared color axes are\n "coloraxis", "coloraxis2", "coloraxis3", etc.\n Settings for these shared color axes are set in\n the layout, under `layout.coloraxis`,\n `layout.coloraxis2`, etc. Note that multiple\n color scales can be linked to the same color\n axis.\n colorscale\n Sets the colorscale. Has an effect only if in\n `marker.line.color`is set to a numerical array.\n The colorscale must be an array containing\n arrays mapping a normalized value to an rgb,\n rgba, hex, hsl, hsv, or named color string. At\n minimum, a mapping for the lowest (0) and\n highest (1) values are required. For example,\n `[[0, \'rgb(0,0,255)\', [1, \'rgb(255,0,0)\']]`. To\n control the bounds of the colorscale in color\n space, use`marker.line.cmin` and\n `marker.line.cmax`. Alternatively, `colorscale`\n may be a palette name string of the following\n list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R\n eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black\n body,Earth,Electric,Viridis,Cividis.\n colorsrc\n Sets the source reference on plot.ly for color\n .\n reversescale\n Reverses the color mapping if true. Has an\n effect only if in `marker.line.color`is set to\n a numerical array. 
If true, `marker.line.cmin`\n will correspond to the last color in the array\n and `marker.line.cmax` will correspond to the\n first color.\n width\n Sets the width (in px) of the lines bounding\n the marker points.\n widthsrc\n Sets the source reference on plot.ly for width\n .\n\n Returns\n -------\n plotly.graph_objs.scattergeo.marker.Line\n ' return self['line']
3,960,754,651,705,854,500
The 'line' property is an instance of Line that may be specified as: - An instance of plotly.graph_objs.scattergeo.marker.Line - A dict of string/value properties that will be passed to the Line constructor Supported dict properties: autocolorscale Determines whether the colorscale is a default palette (`autocolorscale: true`) or the palette determined by `marker.line.colorscale`. Has an effect only if in `marker.line.color`is set to a numerical array. In case `colorscale` is unspecified or `autocolorscale` is true, the default palette will be chosen according to whether numbers in the `color` array are all positive, all negative or mixed. cauto Determines whether or not the color domain is computed with respect to the input data (here in `marker.line.color`) or the bounds set in `marker.line.cmin` and `marker.line.cmax` Has an effect only if in `marker.line.color`is set to a numerical array. Defaults to `false` when `marker.line.cmin` and `marker.line.cmax` are set by the user. cmax Sets the upper bound of the color domain. Has an effect only if in `marker.line.color`is set to a numerical array. Value should have the same units as in `marker.line.color` and if set, `marker.line.cmin` must be set as well. cmid Sets the mid-point of the color domain by scaling `marker.line.cmin` and/or `marker.line.cmax` to be equidistant to this point. Has an effect only if in `marker.line.color`is set to a numerical array. Value should have the same units as in `marker.line.color`. Has no effect when `marker.line.cauto` is `false`. cmin Sets the lower bound of the color domain. Has an effect only if in `marker.line.color`is set to a numerical array. Value should have the same units as in `marker.line.color` and if set, `marker.line.cmax` must be set as well. color Sets themarker.linecolor. It accepts either a specific color or an array of numbers that are mapped to the colorscale relative to the max and min values of the array or relative to `marker.line.cmin` and `marker.line.cmax` if set. coloraxis Sets a reference to a shared color axis. References to these shared color axes are "coloraxis", "coloraxis2", "coloraxis3", etc. Settings for these shared color axes are set in the layout, under `layout.coloraxis`, `layout.coloraxis2`, etc. Note that multiple color scales can be linked to the same color axis. colorscale Sets the colorscale. Has an effect only if in `marker.line.color`is set to a numerical array. The colorscale must be an array containing arrays mapping a normalized value to an rgb, rgba, hex, hsl, hsv, or named color string. At minimum, a mapping for the lowest (0) and highest (1) values are required. For example, `[[0, 'rgb(0,0,255)', [1, 'rgb(255,0,0)']]`. To control the bounds of the colorscale in color space, use`marker.line.cmin` and `marker.line.cmax`. Alternatively, `colorscale` may be a palette name string of the following list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black body,Earth,Electric,Viridis,Cividis. colorsrc Sets the source reference on plot.ly for color . reversescale Reverses the color mapping if true. Has an effect only if in `marker.line.color`is set to a numerical array. If true, `marker.line.cmin` will correspond to the last color in the array and `marker.line.cmax` will correspond to the first color. width Sets the width (in px) of the lines bounding the marker points. widthsrc Sets the source reference on plot.ly for width . Returns ------- plotly.graph_objs.scattergeo.marker.Line
plotly/graph_objs/scattergeo/__init__.py
line
Jonathan-MW/plotly.py
python
@property def line(self): '\n The \'line\' property is an instance of Line\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.marker.Line\n - A dict of string/value properties that will be passed\n to the Line constructor\n \n Supported dict properties:\n \n autocolorscale\n Determines whether the colorscale is a default\n palette (`autocolorscale: true`) or the palette\n determined by `marker.line.colorscale`. Has an\n effect only if in `marker.line.color`is set to\n a numerical array. In case `colorscale` is\n unspecified or `autocolorscale` is true, the\n default palette will be chosen according to\n whether numbers in the `color` array are all\n positive, all negative or mixed.\n cauto\n Determines whether or not the color domain is\n computed with respect to the input data (here\n in `marker.line.color`) or the bounds set in\n `marker.line.cmin` and `marker.line.cmax` Has\n an effect only if in `marker.line.color`is set\n to a numerical array. Defaults to `false` when\n `marker.line.cmin` and `marker.line.cmax` are\n set by the user.\n cmax\n Sets the upper bound of the color domain. Has\n an effect only if in `marker.line.color`is set\n to a numerical array. Value should have the\n same units as in `marker.line.color` and if\n set, `marker.line.cmin` must be set as well.\n cmid\n Sets the mid-point of the color domain by\n scaling `marker.line.cmin` and/or\n `marker.line.cmax` to be equidistant to this\n point. Has an effect only if in\n `marker.line.color`is set to a numerical array.\n Value should have the same units as in\n `marker.line.color`. Has no effect when\n `marker.line.cauto` is `false`.\n cmin\n Sets the lower bound of the color domain. Has\n an effect only if in `marker.line.color`is set\n to a numerical array. Value should have the\n same units as in `marker.line.color` and if\n set, `marker.line.cmax` must be set as well.\n color\n Sets themarker.linecolor. It accepts either a\n specific color or an array of numbers that are\n mapped to the colorscale relative to the max\n and min values of the array or relative to\n `marker.line.cmin` and `marker.line.cmax` if\n set.\n coloraxis\n Sets a reference to a shared color axis.\n References to these shared color axes are\n "coloraxis", "coloraxis2", "coloraxis3", etc.\n Settings for these shared color axes are set in\n the layout, under `layout.coloraxis`,\n `layout.coloraxis2`, etc. Note that multiple\n color scales can be linked to the same color\n axis.\n colorscale\n Sets the colorscale. Has an effect only if in\n `marker.line.color`is set to a numerical array.\n The colorscale must be an array containing\n arrays mapping a normalized value to an rgb,\n rgba, hex, hsl, hsv, or named color string. At\n minimum, a mapping for the lowest (0) and\n highest (1) values are required. For example,\n `[[0, \'rgb(0,0,255)\', [1, \'rgb(255,0,0)\']]`. To\n control the bounds of the colorscale in color\n space, use`marker.line.cmin` and\n `marker.line.cmax`. Alternatively, `colorscale`\n may be a palette name string of the following\n list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R\n eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black\n body,Earth,Electric,Viridis,Cividis.\n colorsrc\n Sets the source reference on plot.ly for color\n .\n reversescale\n Reverses the color mapping if true. Has an\n effect only if in `marker.line.color`is set to\n a numerical array. 
If true, `marker.line.cmin`\n will correspond to the last color in the array\n and `marker.line.cmax` will correspond to the\n first color.\n width\n Sets the width (in px) of the lines bounding\n the marker points.\n widthsrc\n Sets the source reference on plot.ly for width\n .\n\n Returns\n -------\n plotly.graph_objs.scattergeo.marker.Line\n ' return self['line']
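The Line docstring above covers the outline drawn around each marker point, including its own color mapping. A minimal sketch that only sets a fixed outline width and color (data invented):

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[-3.7, 12.5, 23.7], lat=[40.4, 41.9, 38.0], mode='markers',
    marker=dict(
        size=14,
        color='lightskyblue',
        # Outline each point; width is in px, per the docstring above.
        line=dict(width=2, color='darkslategray'),
    ),
)
fig = go.Figure(data=[trace])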
@property def opacity(self): "\n Sets the marker opacity.\n \n The 'opacity' property is a number and may be specified as:\n - An int or float in the interval [0, 1]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|float|numpy.ndarray\n " return self['opacity']
-2,368,020,270,502,072,000
Sets the marker opacity. The 'opacity' property is a number and may be specified as: - An int or float in the interval [0, 1] - A tuple, list, or one-dimensional numpy array of the above Returns ------- int|float|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
opacity
Jonathan-MW/plotly.py
python
@property def opacity(self): "\n Sets the marker opacity.\n \n The 'opacity' property is a number and may be specified as:\n - An int or float in the interval [0, 1]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|float|numpy.ndarray\n " return self['opacity']
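Per the `opacity` docstring above, the property accepts either one value in [0, 1] or one value per point. A short sketch with invented data:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[0, 10, 20, 30], lat=[50, 52, 54, 56], mode='markers',
    marker=dict(
        size=12,
        color='crimson',
        opacity=[1.0, 0.75, 0.5, 0.25],   # one opacity per marker
    ),
)
fig = go.Figure(data=[trace])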
@property def opacitysrc(self): "\n Sets the source reference on plot.ly for opacity .\n \n The 'opacitysrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['opacitysrc']
2,076,524,803,020,138,800
Sets the source reference on plot.ly for opacity . The 'opacitysrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
opacitysrc
Jonathan-MW/plotly.py
python
@property def opacitysrc(self): "\n Sets the source reference on plot.ly for opacity .\n \n The 'opacitysrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['opacitysrc']
@property def reversescale(self): "\n Reverses the color mapping if true. Has an effect only if in\n `marker.color`is set to a numerical array. If true,\n `marker.cmin` will correspond to the last color in the array\n and `marker.cmax` will correspond to the first color.\n \n The 'reversescale' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['reversescale']
6,328,202,226,544,627,000
Reverses the color mapping if true. Has an effect only if in `marker.color`is set to a numerical array. If true, `marker.cmin` will correspond to the last color in the array and `marker.cmax` will correspond to the first color. The 'reversescale' property must be specified as a bool (either True, or False) Returns ------- bool
plotly/graph_objs/scattergeo/__init__.py
reversescale
Jonathan-MW/plotly.py
python
@property def reversescale(self): "\n Reverses the color mapping if true. Has an effect only if in\n `marker.color`is set to a numerical array. If true,\n `marker.cmin` will correspond to the last color in the array\n and `marker.cmax` will correspond to the first color.\n \n The 'reversescale' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['reversescale']
@property def showscale(self): "\n Determines whether or not a colorbar is displayed for this\n trace. Has an effect only if in `marker.color`is set to a\n numerical array.\n \n The 'showscale' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['showscale']
3,057,608,871,260,698,600
Determines whether or not a colorbar is displayed for this trace. Has an effect only if in `marker.color`is set to a numerical array. The 'showscale' property must be specified as a bool (either True, or False) Returns ------- bool
plotly/graph_objs/scattergeo/__init__.py
showscale
Jonathan-MW/plotly.py
python
@property def showscale(self): "\n Determines whether or not a colorbar is displayed for this\n trace. Has an effect only if in `marker.color`is set to a\n numerical array.\n \n The 'showscale' property must be specified as a bool\n (either True, or False)\n\n Returns\n -------\n bool\n " return self['showscale']
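`reversescale` and `showscale` (the two records above) are simple booleans that flip the color mapping and toggle the trace's colorbar. A brief sketch, data invented:

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[-58.4, -47.9, -70.7], lat=[-34.6, -15.8, -33.4], mode='markers',
    marker=dict(
        color=[4, 9, 6],
        colorscale='Blues',
        reversescale=True,   # cmin maps to the last color, cmax to the first
        showscale=True,      # draw a colorbar for this trace
    ),
)
fig = go.Figure(data=[trace])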
@property def size(self): "\n Sets the marker size (in px).\n \n The 'size' property is a number and may be specified as:\n - An int or float in the interval [0, inf]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|float|numpy.ndarray\n " return self['size']
4,148,300,277,957,588,500
Sets the marker size (in px). The 'size' property is a number and may be specified as: - An int or float in the interval [0, inf] - A tuple, list, or one-dimensional numpy array of the above Returns ------- int|float|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
size
Jonathan-MW/plotly.py
python
@property def size(self): "\n Sets the marker size (in px).\n \n The 'size' property is a number and may be specified as:\n - An int or float in the interval [0, inf]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|float|numpy.ndarray\n " return self['size']
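`size` accepts a scalar or a one-dimensional array (including a numpy array), one entry per point. A minimal sketch; the values and coordinates are made up:

import numpy as np
import plotly.graph_objs as go

weights = np.array([2.1, 8.9, 3.6])   # hypothetical per-point values

trace = go.Scattergeo(
    lon=[-99.1, -118.2, -87.6], lat=[19.4, 34.1, 41.9], mode='markers',
    marker=dict(size=weights * 5, color='seagreen'),   # sizes in px
)
fig = go.Figure(data=[trace])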
@property def sizemin(self): "\n Has an effect only if `marker.size` is set to a numerical\n array. Sets the minimum size (in px) of the rendered marker\n points.\n \n The 'sizemin' property is a number and may be specified as:\n - An int or float in the interval [0, inf]\n\n Returns\n -------\n int|float\n " return self['sizemin']
7,459,494,275,914,576,000
Has an effect only if `marker.size` is set to a numerical array. Sets the minimum size (in px) of the rendered marker points. The 'sizemin' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
sizemin
Jonathan-MW/plotly.py
python
@property def sizemin(self): "\n Has an effect only if `marker.size` is set to a numerical\n array. Sets the minimum size (in px) of the rendered marker\n points.\n \n The 'sizemin' property is a number and may be specified as:\n - An int or float in the interval [0, inf]\n\n Returns\n -------\n int|float\n " return self['sizemin']
@property def sizemode(self): "\n Has an effect only if `marker.size` is set to a numerical\n array. Sets the rule for which the data in `size` is converted\n to pixels.\n \n The 'sizemode' property is an enumeration that may be specified as:\n - One of the following enumeration values:\n ['diameter', 'area']\n\n Returns\n -------\n Any\n " return self['sizemode']
2,682,637,820,718,035,500
Has an effect only if `marker.size` is set to a numerical array. Sets the rule for which the data in `size` is converted to pixels. The 'sizemode' property is an enumeration that may be specified as: - One of the following enumeration values: ['diameter', 'area'] Returns ------- Any
plotly/graph_objs/scattergeo/__init__.py
sizemode
Jonathan-MW/plotly.py
python
@property def sizemode(self): "\n Has an effect only if `marker.size` is set to a numerical\n array. Sets the rule for which the data in `size` is converted\n to pixels.\n \n The 'sizemode' property is an enumeration that may be specified as:\n - One of the following enumeration values:\n ['diameter', 'area']\n\n Returns\n -------\n Any\n " return self['sizemode']
@property def sizeref(self): "\n Has an effect only if `marker.size` is set to a numerical\n array. Sets the scale factor used to determine the rendered\n size of marker points. Use with `sizemin` and `sizemode`.\n \n The 'sizeref' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['sizeref']
-7,085,534,774,129,704,000
Has an effect only if `marker.size` is set to a numerical array. Sets the scale factor used to determine the rendered size of marker points. Use with `sizemin` and `sizemode`. The 'sizeref' property is a number and may be specified as: - An int or float Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
sizeref
Jonathan-MW/plotly.py
python
@property def sizeref(self): "\n Has an effect only if `marker.size` is set to a numerical\n array. Sets the scale factor used to determine the rendered\n size of marker points. Use with `sizemin` and `sizemode`.\n \n The 'sizeref' property is a number and may be specified as:\n - An int or float\n\n Returns\n -------\n int|float\n " return self['sizeref']
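Taken together, `size`, `sizemode`, `sizeref` and `sizemin` (the four records above) control bubble scaling. A hedged sketch of the scaling rule suggested in plotly's bubble-chart documentation, where the 40 px target diameter is an arbitrary choice and the data are invented:

import plotly.graph_objs as go

values = [20., 120., 45., 300.]
max_marker_px = 40.   # target diameter for the largest bubble

trace = go.Scattergeo(
    lon=[116.4, 77.2, 106.8, 121.5], lat=[39.9, 28.6, -6.2, 31.2], mode='markers',
    marker=dict(
        size=values,
        sizemode='area',                                   # interpret values as areas
        sizeref=2. * max(values) / (max_marker_px ** 2),   # scale so max(values) renders near 40 px
        sizemin=4,                                         # never render below 4 px
    ),
)
fig = go.Figure(data=[trace])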
@property def sizesrc(self): "\n Sets the source reference on plot.ly for size .\n \n The 'sizesrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['sizesrc']
8,585,030,508,644,112,000
Sets the source reference on plot.ly for size . The 'sizesrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
sizesrc
Jonathan-MW/plotly.py
python
@property def sizesrc(self): "\n Sets the source reference on plot.ly for size .\n \n The 'sizesrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['sizesrc']
@property def symbol(self): '\n Sets the marker symbol type. Adding 100 is equivalent to\n appending "-open" to a symbol name. Adding 200 is equivalent to\n appending "-dot" to a symbol name. Adding 300 is equivalent to\n appending "-open-dot" or "dot-open" to a symbol name.\n \n The \'symbol\' property is an enumeration that may be specified as:\n - One of the following enumeration values:\n [0, \'circle\', 100, \'circle-open\', 200, \'circle-dot\', 300,\n \'circle-open-dot\', 1, \'square\', 101, \'square-open\', 201,\n \'square-dot\', 301, \'square-open-dot\', 2, \'diamond\', 102,\n \'diamond-open\', 202, \'diamond-dot\', 302,\n \'diamond-open-dot\', 3, \'cross\', 103, \'cross-open\', 203,\n \'cross-dot\', 303, \'cross-open-dot\', 4, \'x\', 104, \'x-open\',\n 204, \'x-dot\', 304, \'x-open-dot\', 5, \'triangle-up\', 105,\n \'triangle-up-open\', 205, \'triangle-up-dot\', 305,\n \'triangle-up-open-dot\', 6, \'triangle-down\', 106,\n \'triangle-down-open\', 206, \'triangle-down-dot\', 306,\n \'triangle-down-open-dot\', 7, \'triangle-left\', 107,\n \'triangle-left-open\', 207, \'triangle-left-dot\', 307,\n \'triangle-left-open-dot\', 8, \'triangle-right\', 108,\n \'triangle-right-open\', 208, \'triangle-right-dot\', 308,\n \'triangle-right-open-dot\', 9, \'triangle-ne\', 109,\n \'triangle-ne-open\', 209, \'triangle-ne-dot\', 309,\n \'triangle-ne-open-dot\', 10, \'triangle-se\', 110,\n \'triangle-se-open\', 210, \'triangle-se-dot\', 310,\n \'triangle-se-open-dot\', 11, \'triangle-sw\', 111,\n \'triangle-sw-open\', 211, \'triangle-sw-dot\', 311,\n \'triangle-sw-open-dot\', 12, \'triangle-nw\', 112,\n \'triangle-nw-open\', 212, \'triangle-nw-dot\', 312,\n \'triangle-nw-open-dot\', 13, \'pentagon\', 113,\n \'pentagon-open\', 213, \'pentagon-dot\', 313,\n \'pentagon-open-dot\', 14, \'hexagon\', 114, \'hexagon-open\',\n 214, \'hexagon-dot\', 314, \'hexagon-open-dot\', 15,\n \'hexagon2\', 115, \'hexagon2-open\', 215, \'hexagon2-dot\',\n 315, \'hexagon2-open-dot\', 16, \'octagon\', 116,\n \'octagon-open\', 216, \'octagon-dot\', 316,\n \'octagon-open-dot\', 17, \'star\', 117, \'star-open\', 217,\n \'star-dot\', 317, \'star-open-dot\', 18, \'hexagram\', 118,\n \'hexagram-open\', 218, \'hexagram-dot\', 318,\n \'hexagram-open-dot\', 19, \'star-triangle-up\', 119,\n \'star-triangle-up-open\', 219, \'star-triangle-up-dot\', 319,\n \'star-triangle-up-open-dot\', 20, \'star-triangle-down\',\n 120, \'star-triangle-down-open\', 220,\n \'star-triangle-down-dot\', 320,\n \'star-triangle-down-open-dot\', 21, \'star-square\', 121,\n \'star-square-open\', 221, \'star-square-dot\', 321,\n \'star-square-open-dot\', 22, \'star-diamond\', 122,\n \'star-diamond-open\', 222, \'star-diamond-dot\', 322,\n \'star-diamond-open-dot\', 23, \'diamond-tall\', 123,\n \'diamond-tall-open\', 223, \'diamond-tall-dot\', 323,\n \'diamond-tall-open-dot\', 24, \'diamond-wide\', 124,\n \'diamond-wide-open\', 224, \'diamond-wide-dot\', 324,\n \'diamond-wide-open-dot\', 25, \'hourglass\', 125,\n \'hourglass-open\', 26, \'bowtie\', 126, \'bowtie-open\', 27,\n \'circle-cross\', 127, \'circle-cross-open\', 28, \'circle-x\',\n 128, \'circle-x-open\', 29, \'square-cross\', 129,\n \'square-cross-open\', 30, \'square-x\', 130, \'square-x-open\',\n 31, \'diamond-cross\', 131, \'diamond-cross-open\', 32,\n \'diamond-x\', 132, \'diamond-x-open\', 33, \'cross-thin\', 133,\n \'cross-thin-open\', 34, \'x-thin\', 134, \'x-thin-open\', 35,\n \'asterisk\', 135, \'asterisk-open\', 36, \'hash\', 136,\n \'hash-open\', 236, \'hash-dot\', 336, 
\'hash-open-dot\', 37,\n \'y-up\', 137, \'y-up-open\', 38, \'y-down\', 138,\n \'y-down-open\', 39, \'y-left\', 139, \'y-left-open\', 40,\n \'y-right\', 140, \'y-right-open\', 41, \'line-ew\', 141,\n \'line-ew-open\', 42, \'line-ns\', 142, \'line-ns-open\', 43,\n \'line-ne\', 143, \'line-ne-open\', 44, \'line-nw\', 144,\n \'line-nw-open\']\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n Any|numpy.ndarray\n ' return self['symbol']
4,035,250,182,230,725,000
Sets the marker symbol type. Adding 100 is equivalent to appending "-open" to a symbol name. Adding 200 is equivalent to appending "-dot" to a symbol name. Adding 300 is equivalent to appending "-open-dot" or "dot-open" to a symbol name. The 'symbol' property is an enumeration that may be specified as: - One of the following enumeration values: [0, 'circle', 100, 'circle-open', 200, 'circle-dot', 300, 'circle-open-dot', 1, 'square', 101, 'square-open', 201, 'square-dot', 301, 'square-open-dot', 2, 'diamond', 102, 'diamond-open', 202, 'diamond-dot', 302, 'diamond-open-dot', 3, 'cross', 103, 'cross-open', 203, 'cross-dot', 303, 'cross-open-dot', 4, 'x', 104, 'x-open', 204, 'x-dot', 304, 'x-open-dot', 5, 'triangle-up', 105, 'triangle-up-open', 205, 'triangle-up-dot', 305, 'triangle-up-open-dot', 6, 'triangle-down', 106, 'triangle-down-open', 206, 'triangle-down-dot', 306, 'triangle-down-open-dot', 7, 'triangle-left', 107, 'triangle-left-open', 207, 'triangle-left-dot', 307, 'triangle-left-open-dot', 8, 'triangle-right', 108, 'triangle-right-open', 208, 'triangle-right-dot', 308, 'triangle-right-open-dot', 9, 'triangle-ne', 109, 'triangle-ne-open', 209, 'triangle-ne-dot', 309, 'triangle-ne-open-dot', 10, 'triangle-se', 110, 'triangle-se-open', 210, 'triangle-se-dot', 310, 'triangle-se-open-dot', 11, 'triangle-sw', 111, 'triangle-sw-open', 211, 'triangle-sw-dot', 311, 'triangle-sw-open-dot', 12, 'triangle-nw', 112, 'triangle-nw-open', 212, 'triangle-nw-dot', 312, 'triangle-nw-open-dot', 13, 'pentagon', 113, 'pentagon-open', 213, 'pentagon-dot', 313, 'pentagon-open-dot', 14, 'hexagon', 114, 'hexagon-open', 214, 'hexagon-dot', 314, 'hexagon-open-dot', 15, 'hexagon2', 115, 'hexagon2-open', 215, 'hexagon2-dot', 315, 'hexagon2-open-dot', 16, 'octagon', 116, 'octagon-open', 216, 'octagon-dot', 316, 'octagon-open-dot', 17, 'star', 117, 'star-open', 217, 'star-dot', 317, 'star-open-dot', 18, 'hexagram', 118, 'hexagram-open', 218, 'hexagram-dot', 318, 'hexagram-open-dot', 19, 'star-triangle-up', 119, 'star-triangle-up-open', 219, 'star-triangle-up-dot', 319, 'star-triangle-up-open-dot', 20, 'star-triangle-down', 120, 'star-triangle-down-open', 220, 'star-triangle-down-dot', 320, 'star-triangle-down-open-dot', 21, 'star-square', 121, 'star-square-open', 221, 'star-square-dot', 321, 'star-square-open-dot', 22, 'star-diamond', 122, 'star-diamond-open', 222, 'star-diamond-dot', 322, 'star-diamond-open-dot', 23, 'diamond-tall', 123, 'diamond-tall-open', 223, 'diamond-tall-dot', 323, 'diamond-tall-open-dot', 24, 'diamond-wide', 124, 'diamond-wide-open', 224, 'diamond-wide-dot', 324, 'diamond-wide-open-dot', 25, 'hourglass', 125, 'hourglass-open', 26, 'bowtie', 126, 'bowtie-open', 27, 'circle-cross', 127, 'circle-cross-open', 28, 'circle-x', 128, 'circle-x-open', 29, 'square-cross', 129, 'square-cross-open', 30, 'square-x', 130, 'square-x-open', 31, 'diamond-cross', 131, 'diamond-cross-open', 32, 'diamond-x', 132, 'diamond-x-open', 33, 'cross-thin', 133, 'cross-thin-open', 34, 'x-thin', 134, 'x-thin-open', 35, 'asterisk', 135, 'asterisk-open', 36, 'hash', 136, 'hash-open', 236, 'hash-dot', 336, 'hash-open-dot', 37, 'y-up', 137, 'y-up-open', 38, 'y-down', 138, 'y-down-open', 39, 'y-left', 139, 'y-left-open', 40, 'y-right', 140, 'y-right-open', 41, 'line-ew', 141, 'line-ew-open', 42, 'line-ns', 142, 'line-ns-open', 43, 'line-ne', 143, 'line-ne-open', 44, 'line-nw', 144, 'line-nw-open'] - A tuple, list, or one-dimensional numpy array of the above Returns ------- Any|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
symbol
Jonathan-MW/plotly.py
python
@property def symbol(self): '\n Sets the marker symbol type. Adding 100 is equivalent to\n appending "-open" to a symbol name. Adding 200 is equivalent to\n appending "-dot" to a symbol name. Adding 300 is equivalent to\n appending "-open-dot" or "dot-open" to a symbol name.\n \n The \'symbol\' property is an enumeration that may be specified as:\n - One of the following enumeration values:\n [0, \'circle\', 100, \'circle-open\', 200, \'circle-dot\', 300,\n \'circle-open-dot\', 1, \'square\', 101, \'square-open\', 201,\n \'square-dot\', 301, \'square-open-dot\', 2, \'diamond\', 102,\n \'diamond-open\', 202, \'diamond-dot\', 302,\n \'diamond-open-dot\', 3, \'cross\', 103, \'cross-open\', 203,\n \'cross-dot\', 303, \'cross-open-dot\', 4, \'x\', 104, \'x-open\',\n 204, \'x-dot\', 304, \'x-open-dot\', 5, \'triangle-up\', 105,\n \'triangle-up-open\', 205, \'triangle-up-dot\', 305,\n \'triangle-up-open-dot\', 6, \'triangle-down\', 106,\n \'triangle-down-open\', 206, \'triangle-down-dot\', 306,\n \'triangle-down-open-dot\', 7, \'triangle-left\', 107,\n \'triangle-left-open\', 207, \'triangle-left-dot\', 307,\n \'triangle-left-open-dot\', 8, \'triangle-right\', 108,\n \'triangle-right-open\', 208, \'triangle-right-dot\', 308,\n \'triangle-right-open-dot\', 9, \'triangle-ne\', 109,\n \'triangle-ne-open\', 209, \'triangle-ne-dot\', 309,\n \'triangle-ne-open-dot\', 10, \'triangle-se\', 110,\n \'triangle-se-open\', 210, \'triangle-se-dot\', 310,\n \'triangle-se-open-dot\', 11, \'triangle-sw\', 111,\n \'triangle-sw-open\', 211, \'triangle-sw-dot\', 311,\n \'triangle-sw-open-dot\', 12, \'triangle-nw\', 112,\n \'triangle-nw-open\', 212, \'triangle-nw-dot\', 312,\n \'triangle-nw-open-dot\', 13, \'pentagon\', 113,\n \'pentagon-open\', 213, \'pentagon-dot\', 313,\n \'pentagon-open-dot\', 14, \'hexagon\', 114, \'hexagon-open\',\n 214, \'hexagon-dot\', 314, \'hexagon-open-dot\', 15,\n \'hexagon2\', 115, \'hexagon2-open\', 215, \'hexagon2-dot\',\n 315, \'hexagon2-open-dot\', 16, \'octagon\', 116,\n \'octagon-open\', 216, \'octagon-dot\', 316,\n \'octagon-open-dot\', 17, \'star\', 117, \'star-open\', 217,\n \'star-dot\', 317, \'star-open-dot\', 18, \'hexagram\', 118,\n \'hexagram-open\', 218, \'hexagram-dot\', 318,\n \'hexagram-open-dot\', 19, \'star-triangle-up\', 119,\n \'star-triangle-up-open\', 219, \'star-triangle-up-dot\', 319,\n \'star-triangle-up-open-dot\', 20, \'star-triangle-down\',\n 120, \'star-triangle-down-open\', 220,\n \'star-triangle-down-dot\', 320,\n \'star-triangle-down-open-dot\', 21, \'star-square\', 121,\n \'star-square-open\', 221, \'star-square-dot\', 321,\n \'star-square-open-dot\', 22, \'star-diamond\', 122,\n \'star-diamond-open\', 222, \'star-diamond-dot\', 322,\n \'star-diamond-open-dot\', 23, \'diamond-tall\', 123,\n \'diamond-tall-open\', 223, \'diamond-tall-dot\', 323,\n \'diamond-tall-open-dot\', 24, \'diamond-wide\', 124,\n \'diamond-wide-open\', 224, \'diamond-wide-dot\', 324,\n \'diamond-wide-open-dot\', 25, \'hourglass\', 125,\n \'hourglass-open\', 26, \'bowtie\', 126, \'bowtie-open\', 27,\n \'circle-cross\', 127, \'circle-cross-open\', 28, \'circle-x\',\n 128, \'circle-x-open\', 29, \'square-cross\', 129,\n \'square-cross-open\', 30, \'square-x\', 130, \'square-x-open\',\n 31, \'diamond-cross\', 131, \'diamond-cross-open\', 32,\n \'diamond-x\', 132, \'diamond-x-open\', 33, \'cross-thin\', 133,\n \'cross-thin-open\', 34, \'x-thin\', 134, \'x-thin-open\', 35,\n \'asterisk\', 135, \'asterisk-open\', 36, \'hash\', 136,\n \'hash-open\', 236, \'hash-dot\', 336, 
\'hash-open-dot\', 37,\n \'y-up\', 137, \'y-up-open\', 38, \'y-down\', 138,\n \'y-down-open\', 39, \'y-left\', 139, \'y-left-open\', 40,\n \'y-right\', 140, \'y-right-open\', 41, \'line-ew\', 141,\n \'line-ew-open\', 42, \'line-ns\', 142, \'line-ns-open\', 43,\n \'line-ne\', 143, \'line-ne-open\', 44, \'line-nw\', 144,\n \'line-nw-open\']\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n Any|numpy.ndarray\n ' return self['symbol']
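A minimal usage sketch for the symbol codes documented above (not part of the dataset records; the coordinates are made-up sample points, and it assumes a plotly version where plotly.graph_objs.scattergeo.Marker is available, as in this repository):
import plotly.graph_objs as go

# 'circle' is code 0; adding 100/200/300 gives the '-open', '-dot', '-open-dot'
# variants, so 'circle-open' and the numeric code 100 refer to the same symbol.
trace = go.Scattergeo(
    lon=[-73.6, 2.35, 139.7],
    lat=[45.5, 48.86, 35.7],
    mode='markers',
    marker=go.scattergeo.Marker(symbol=['circle', 100, 'circle-open-dot'], size=12),
)
fig = go.Figure(data=[trace])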
@property def symbolsrc(self): "\n Sets the source reference on plot.ly for symbol .\n \n The 'symbolsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['symbolsrc']
-6,024,676,400,931,487,000
Sets the source reference on plot.ly for symbol . The 'symbolsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
symbolsrc
Jonathan-MW/plotly.py
python
@property def symbolsrc(self): "\n Sets the source reference on plot.ly for symbol .\n \n The 'symbolsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['symbolsrc']
def __init__(self, arg=None, autocolorscale=None, cauto=None, cmax=None, cmid=None, cmin=None, color=None, coloraxis=None, colorbar=None, colorscale=None, colorsrc=None, gradient=None, line=None, opacity=None, opacitysrc=None, reversescale=None, showscale=None, size=None, sizemin=None, sizemode=None, sizeref=None, sizesrc=None, symbol=None, symbolsrc=None, **kwargs): '\n Construct a new Marker object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Marker\n autocolorscale\n Determines whether the colorscale is a default palette\n (`autocolorscale: true`) or the palette determined by\n `marker.colorscale`. Has an effect only if in\n `marker.color`is set to a numerical array. In case\n `colorscale` is unspecified or `autocolorscale` is\n true, the default palette will be chosen according to\n whether numbers in the `color` array are all positive,\n all negative or mixed.\n cauto\n Determines whether or not the color domain is computed\n with respect to the input data (here in `marker.color`)\n or the bounds set in `marker.cmin` and `marker.cmax`\n Has an effect only if in `marker.color`is set to a\n numerical array. Defaults to `false` when `marker.cmin`\n and `marker.cmax` are set by the user.\n cmax\n Sets the upper bound of the color domain. Has an effect\n only if in `marker.color`is set to a numerical array.\n Value should have the same units as in `marker.color`\n and if set, `marker.cmin` must be set as well.\n cmid\n Sets the mid-point of the color domain by scaling\n `marker.cmin` and/or `marker.cmax` to be equidistant to\n this point. Has an effect only if in `marker.color`is\n set to a numerical array. Value should have the same\n units as in `marker.color`. Has no effect when\n `marker.cauto` is `false`.\n cmin\n Sets the lower bound of the color domain. Has an effect\n only if in `marker.color`is set to a numerical array.\n Value should have the same units as in `marker.color`\n and if set, `marker.cmax` must be set as well.\n color\n Sets themarkercolor. It accepts either a specific color\n or an array of numbers that are mapped to the\n colorscale relative to the max and min values of the\n array or relative to `marker.cmin` and `marker.cmax` if\n set.\n coloraxis\n Sets a reference to a shared color axis. References to\n these shared color axes are "coloraxis", "coloraxis2",\n "coloraxis3", etc. Settings for these shared color axes\n are set in the layout, under `layout.coloraxis`,\n `layout.coloraxis2`, etc. Note that multiple color\n scales can be linked to the same color axis.\n colorbar\n plotly.graph_objs.scattergeo.marker.ColorBar instance\n or dict with compatible properties\n colorscale\n Sets the colorscale. Has an effect only if in\n `marker.color`is set to a numerical array. The\n colorscale must be an array containing arrays mapping a\n normalized value to an rgb, rgba, hex, hsl, hsv, or\n named color string. At minimum, a mapping for the\n lowest (0) and highest (1) values are required. For\n example, `[[0, \'rgb(0,0,255)\', [1, \'rgb(255,0,0)\']]`.\n To control the bounds of the colorscale in color space,\n use`marker.cmin` and `marker.cmax`. 
Alternatively,\n `colorscale` may be a palette name string of the\n following list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu\n ,Reds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Blackbody,E\n arth,Electric,Viridis,Cividis.\n colorsrc\n Sets the source reference on plot.ly for color .\n gradient\n plotly.graph_objs.scattergeo.marker.Gradient instance\n or dict with compatible properties\n line\n plotly.graph_objs.scattergeo.marker.Line instance or\n dict with compatible properties\n opacity\n Sets the marker opacity.\n opacitysrc\n Sets the source reference on plot.ly for opacity .\n reversescale\n Reverses the color mapping if true. Has an effect only\n if in `marker.color`is set to a numerical array. If\n true, `marker.cmin` will correspond to the last color\n in the array and `marker.cmax` will correspond to the\n first color.\n showscale\n Determines whether or not a colorbar is displayed for\n this trace. Has an effect only if in `marker.color`is\n set to a numerical array.\n size\n Sets the marker size (in px).\n sizemin\n Has an effect only if `marker.size` is set to a\n numerical array. Sets the minimum size (in px) of the\n rendered marker points.\n sizemode\n Has an effect only if `marker.size` is set to a\n numerical array. Sets the rule for which the data in\n `size` is converted to pixels.\n sizeref\n Has an effect only if `marker.size` is set to a\n numerical array. Sets the scale factor used to\n determine the rendered size of marker points. Use with\n `sizemin` and `sizemode`.\n sizesrc\n Sets the source reference on plot.ly for size .\n symbol\n Sets the marker symbol type. Adding 100 is equivalent\n to appending "-open" to a symbol name. Adding 200 is\n equivalent to appending "-dot" to a symbol name. Adding\n 300 is equivalent to appending "-open-dot" or "dot-\n open" to a symbol name.\n symbolsrc\n Sets the source reference on plot.ly for symbol .\n\n Returns\n -------\n Marker\n ' super(Marker, self).__init__('marker') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Marker \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Marker') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import marker as v_marker self._validators['autocolorscale'] = v_marker.AutocolorscaleValidator() self._validators['cauto'] = v_marker.CautoValidator() self._validators['cmax'] = v_marker.CmaxValidator() self._validators['cmid'] = v_marker.CmidValidator() self._validators['cmin'] = v_marker.CminValidator() self._validators['color'] = v_marker.ColorValidator() self._validators['coloraxis'] = v_marker.ColoraxisValidator() self._validators['colorbar'] = v_marker.ColorBarValidator() self._validators['colorscale'] = v_marker.ColorscaleValidator() self._validators['colorsrc'] = v_marker.ColorsrcValidator() self._validators['gradient'] = v_marker.GradientValidator() self._validators['line'] = v_marker.LineValidator() self._validators['opacity'] = v_marker.OpacityValidator() self._validators['opacitysrc'] = v_marker.OpacitysrcValidator() self._validators['reversescale'] = v_marker.ReversescaleValidator() self._validators['showscale'] = v_marker.ShowscaleValidator() self._validators['size'] = v_marker.SizeValidator() self._validators['sizemin'] = v_marker.SizeminValidator() self._validators['sizemode'] = v_marker.SizemodeValidator() self._validators['sizeref'] = 
v_marker.SizerefValidator() self._validators['sizesrc'] = v_marker.SizesrcValidator() self._validators['symbol'] = v_marker.SymbolValidator() self._validators['symbolsrc'] = v_marker.SymbolsrcValidator() _v = arg.pop('autocolorscale', None) self['autocolorscale'] = (autocolorscale if (autocolorscale is not None) else _v) _v = arg.pop('cauto', None) self['cauto'] = (cauto if (cauto is not None) else _v) _v = arg.pop('cmax', None) self['cmax'] = (cmax if (cmax is not None) else _v) _v = arg.pop('cmid', None) self['cmid'] = (cmid if (cmid is not None) else _v) _v = arg.pop('cmin', None) self['cmin'] = (cmin if (cmin is not None) else _v) _v = arg.pop('color', None) self['color'] = (color if (color is not None) else _v) _v = arg.pop('coloraxis', None) self['coloraxis'] = (coloraxis if (coloraxis is not None) else _v) _v = arg.pop('colorbar', None) self['colorbar'] = (colorbar if (colorbar is not None) else _v) _v = arg.pop('colorscale', None) self['colorscale'] = (colorscale if (colorscale is not None) else _v) _v = arg.pop('colorsrc', None) self['colorsrc'] = (colorsrc if (colorsrc is not None) else _v) _v = arg.pop('gradient', None) self['gradient'] = (gradient if (gradient is not None) else _v) _v = arg.pop('line', None) self['line'] = (line if (line is not None) else _v) _v = arg.pop('opacity', None) self['opacity'] = (opacity if (opacity is not None) else _v) _v = arg.pop('opacitysrc', None) self['opacitysrc'] = (opacitysrc if (opacitysrc is not None) else _v) _v = arg.pop('reversescale', None) self['reversescale'] = (reversescale if (reversescale is not None) else _v) _v = arg.pop('showscale', None) self['showscale'] = (showscale if (showscale is not None) else _v) _v = arg.pop('size', None) self['size'] = (size if (size is not None) else _v) _v = arg.pop('sizemin', None) self['sizemin'] = (sizemin if (sizemin is not None) else _v) _v = arg.pop('sizemode', None) self['sizemode'] = (sizemode if (sizemode is not None) else _v) _v = arg.pop('sizeref', None) self['sizeref'] = (sizeref if (sizeref is not None) else _v) _v = arg.pop('sizesrc', None) self['sizesrc'] = (sizesrc if (sizesrc is not None) else _v) _v = arg.pop('symbol', None) self['symbol'] = (symbol if (symbol is not None) else _v) _v = arg.pop('symbolsrc', None) self['symbolsrc'] = (symbolsrc if (symbolsrc is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
-4,541,378,795,674,928,000
Construct a new Marker object Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Marker autocolorscale Determines whether the colorscale is a default palette (`autocolorscale: true`) or the palette determined by `marker.colorscale`. Has an effect only if `marker.color` is set to a numerical array. In case `colorscale` is unspecified or `autocolorscale` is true, the default palette will be chosen according to whether numbers in the `color` array are all positive, all negative or mixed. cauto Determines whether or not the color domain is computed with respect to the input data (here in `marker.color`) or the bounds set in `marker.cmin` and `marker.cmax`. Has an effect only if `marker.color` is set to a numerical array. Defaults to `false` when `marker.cmin` and `marker.cmax` are set by the user. cmax Sets the upper bound of the color domain. Has an effect only if `marker.color` is set to a numerical array. Value should have the same units as in `marker.color` and if set, `marker.cmin` must be set as well. cmid Sets the mid-point of the color domain by scaling `marker.cmin` and/or `marker.cmax` to be equidistant to this point. Has an effect only if `marker.color` is set to a numerical array. Value should have the same units as in `marker.color`. Has no effect when `marker.cauto` is `false`. cmin Sets the lower bound of the color domain. Has an effect only if `marker.color` is set to a numerical array. Value should have the same units as in `marker.color` and if set, `marker.cmax` must be set as well. color Sets the marker color. It accepts either a specific color or an array of numbers that are mapped to the colorscale relative to the max and min values of the array or relative to `marker.cmin` and `marker.cmax` if set. coloraxis Sets a reference to a shared color axis. References to these shared color axes are "coloraxis", "coloraxis2", "coloraxis3", etc. Settings for these shared color axes are set in the layout, under `layout.coloraxis`, `layout.coloraxis2`, etc. Note that multiple color scales can be linked to the same color axis. colorbar plotly.graph_objs.scattergeo.marker.ColorBar instance or dict with compatible properties colorscale Sets the colorscale. Has an effect only if `marker.color` is set to a numerical array. The colorscale must be an array containing arrays mapping a normalized value to an rgb, rgba, hex, hsl, hsv, or named color string. At minimum, a mapping for the lowest (0) and highest (1) values is required. For example, `[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To control the bounds of the colorscale in color space, use `marker.cmin` and `marker.cmax`. Alternatively, `colorscale` may be a palette name string from the following list: Greys, YlGnBu, Greens, YlOrRd, Bluered, RdBu, Reds, Blues, Picnic, Rainbow, Portland, Jet, Hot, Blackbody, Earth, Electric, Viridis, Cividis. colorsrc Sets the source reference on plot.ly for color. gradient plotly.graph_objs.scattergeo.marker.Gradient instance or dict with compatible properties line plotly.graph_objs.scattergeo.marker.Line instance or dict with compatible properties opacity Sets the marker opacity. opacitysrc Sets the source reference on plot.ly for opacity. reversescale Reverses the color mapping if true. Has an effect only if `marker.color` is set to a numerical array. If true, `marker.cmin` will correspond to the last color in the array and `marker.cmax` will correspond to the first color. showscale Determines whether or not a colorbar is displayed for this trace. Has an effect only if `marker.color` is set to a numerical array. size Sets the marker size (in px). sizemin Has an effect only if `marker.size` is set to a numerical array. Sets the minimum size (in px) of the rendered marker points. sizemode Has an effect only if `marker.size` is set to a numerical array. Sets the rule for which the data in `size` is converted to pixels. sizeref Has an effect only if `marker.size` is set to a numerical array. Sets the scale factor used to determine the rendered size of marker points. Use with `sizemin` and `sizemode`. sizesrc Sets the source reference on plot.ly for size. symbol Sets the marker symbol type. Adding 100 is equivalent to appending "-open" to a symbol name. Adding 200 is equivalent to appending "-dot" to a symbol name. Adding 300 is equivalent to appending "-open-dot" or "dot-open" to a symbol name. symbolsrc Sets the source reference on plot.ly for symbol. Returns ------- Marker
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, autocolorscale=None, cauto=None, cmax=None, cmid=None, cmin=None, color=None, coloraxis=None, colorbar=None, colorscale=None, colorsrc=None, gradient=None, line=None, opacity=None, opacitysrc=None, reversescale=None, showscale=None, size=None, sizemin=None, sizemode=None, sizeref=None, sizesrc=None, symbol=None, symbolsrc=None, **kwargs): '\n Construct a new Marker object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Marker\n autocolorscale\n Determines whether the colorscale is a default palette\n (`autocolorscale: true`) or the palette determined by\n `marker.colorscale`. Has an effect only if in\n `marker.color`is set to a numerical array. In case\n `colorscale` is unspecified or `autocolorscale` is\n true, the default palette will be chosen according to\n whether numbers in the `color` array are all positive,\n all negative or mixed.\n cauto\n Determines whether or not the color domain is computed\n with respect to the input data (here in `marker.color`)\n or the bounds set in `marker.cmin` and `marker.cmax`\n Has an effect only if in `marker.color`is set to a\n numerical array. Defaults to `false` when `marker.cmin`\n and `marker.cmax` are set by the user.\n cmax\n Sets the upper bound of the color domain. Has an effect\n only if in `marker.color`is set to a numerical array.\n Value should have the same units as in `marker.color`\n and if set, `marker.cmin` must be set as well.\n cmid\n Sets the mid-point of the color domain by scaling\n `marker.cmin` and/or `marker.cmax` to be equidistant to\n this point. Has an effect only if in `marker.color`is\n set to a numerical array. Value should have the same\n units as in `marker.color`. Has no effect when\n `marker.cauto` is `false`.\n cmin\n Sets the lower bound of the color domain. Has an effect\n only if in `marker.color`is set to a numerical array.\n Value should have the same units as in `marker.color`\n and if set, `marker.cmax` must be set as well.\n color\n Sets themarkercolor. It accepts either a specific color\n or an array of numbers that are mapped to the\n colorscale relative to the max and min values of the\n array or relative to `marker.cmin` and `marker.cmax` if\n set.\n coloraxis\n Sets a reference to a shared color axis. References to\n these shared color axes are "coloraxis", "coloraxis2",\n "coloraxis3", etc. Settings for these shared color axes\n are set in the layout, under `layout.coloraxis`,\n `layout.coloraxis2`, etc. Note that multiple color\n scales can be linked to the same color axis.\n colorbar\n plotly.graph_objs.scattergeo.marker.ColorBar instance\n or dict with compatible properties\n colorscale\n Sets the colorscale. Has an effect only if in\n `marker.color`is set to a numerical array. The\n colorscale must be an array containing arrays mapping a\n normalized value to an rgb, rgba, hex, hsl, hsv, or\n named color string. At minimum, a mapping for the\n lowest (0) and highest (1) values are required. For\n example, `[[0, \'rgb(0,0,255)\', [1, \'rgb(255,0,0)\']]`.\n To control the bounds of the colorscale in color space,\n use`marker.cmin` and `marker.cmax`. 
Alternatively,\n `colorscale` may be a palette name string of the\n following list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu\n ,Reds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Blackbody,E\n arth,Electric,Viridis,Cividis.\n colorsrc\n Sets the source reference on plot.ly for color .\n gradient\n plotly.graph_objs.scattergeo.marker.Gradient instance\n or dict with compatible properties\n line\n plotly.graph_objs.scattergeo.marker.Line instance or\n dict with compatible properties\n opacity\n Sets the marker opacity.\n opacitysrc\n Sets the source reference on plot.ly for opacity .\n reversescale\n Reverses the color mapping if true. Has an effect only\n if in `marker.color`is set to a numerical array. If\n true, `marker.cmin` will correspond to the last color\n in the array and `marker.cmax` will correspond to the\n first color.\n showscale\n Determines whether or not a colorbar is displayed for\n this trace. Has an effect only if in `marker.color`is\n set to a numerical array.\n size\n Sets the marker size (in px).\n sizemin\n Has an effect only if `marker.size` is set to a\n numerical array. Sets the minimum size (in px) of the\n rendered marker points.\n sizemode\n Has an effect only if `marker.size` is set to a\n numerical array. Sets the rule for which the data in\n `size` is converted to pixels.\n sizeref\n Has an effect only if `marker.size` is set to a\n numerical array. Sets the scale factor used to\n determine the rendered size of marker points. Use with\n `sizemin` and `sizemode`.\n sizesrc\n Sets the source reference on plot.ly for size .\n symbol\n Sets the marker symbol type. Adding 100 is equivalent\n to appending "-open" to a symbol name. Adding 200 is\n equivalent to appending "-dot" to a symbol name. Adding\n 300 is equivalent to appending "-open-dot" or "dot-\n open" to a symbol name.\n symbolsrc\n Sets the source reference on plot.ly for symbol .\n\n Returns\n -------\n Marker\n ' super(Marker, self).__init__('marker') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Marker \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Marker') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import marker as v_marker self._validators['autocolorscale'] = v_marker.AutocolorscaleValidator() self._validators['cauto'] = v_marker.CautoValidator() self._validators['cmax'] = v_marker.CmaxValidator() self._validators['cmid'] = v_marker.CmidValidator() self._validators['cmin'] = v_marker.CminValidator() self._validators['color'] = v_marker.ColorValidator() self._validators['coloraxis'] = v_marker.ColoraxisValidator() self._validators['colorbar'] = v_marker.ColorBarValidator() self._validators['colorscale'] = v_marker.ColorscaleValidator() self._validators['colorsrc'] = v_marker.ColorsrcValidator() self._validators['gradient'] = v_marker.GradientValidator() self._validators['line'] = v_marker.LineValidator() self._validators['opacity'] = v_marker.OpacityValidator() self._validators['opacitysrc'] = v_marker.OpacitysrcValidator() self._validators['reversescale'] = v_marker.ReversescaleValidator() self._validators['showscale'] = v_marker.ShowscaleValidator() self._validators['size'] = v_marker.SizeValidator() self._validators['sizemin'] = v_marker.SizeminValidator() self._validators['sizemode'] = v_marker.SizemodeValidator() self._validators['sizeref'] = 
v_marker.SizerefValidator() self._validators['sizesrc'] = v_marker.SizesrcValidator() self._validators['symbol'] = v_marker.SymbolValidator() self._validators['symbolsrc'] = v_marker.SymbolsrcValidator() _v = arg.pop('autocolorscale', None) self['autocolorscale'] = (autocolorscale if (autocolorscale is not None) else _v) _v = arg.pop('cauto', None) self['cauto'] = (cauto if (cauto is not None) else _v) _v = arg.pop('cmax', None) self['cmax'] = (cmax if (cmax is not None) else _v) _v = arg.pop('cmid', None) self['cmid'] = (cmid if (cmid is not None) else _v) _v = arg.pop('cmin', None) self['cmin'] = (cmin if (cmin is not None) else _v) _v = arg.pop('color', None) self['color'] = (color if (color is not None) else _v) _v = arg.pop('coloraxis', None) self['coloraxis'] = (coloraxis if (coloraxis is not None) else _v) _v = arg.pop('colorbar', None) self['colorbar'] = (colorbar if (colorbar is not None) else _v) _v = arg.pop('colorscale', None) self['colorscale'] = (colorscale if (colorscale is not None) else _v) _v = arg.pop('colorsrc', None) self['colorsrc'] = (colorsrc if (colorsrc is not None) else _v) _v = arg.pop('gradient', None) self['gradient'] = (gradient if (gradient is not None) else _v) _v = arg.pop('line', None) self['line'] = (line if (line is not None) else _v) _v = arg.pop('opacity', None) self['opacity'] = (opacity if (opacity is not None) else _v) _v = arg.pop('opacitysrc', None) self['opacitysrc'] = (opacitysrc if (opacitysrc is not None) else _v) _v = arg.pop('reversescale', None) self['reversescale'] = (reversescale if (reversescale is not None) else _v) _v = arg.pop('showscale', None) self['showscale'] = (showscale if (showscale is not None) else _v) _v = arg.pop('size', None) self['size'] = (size if (size is not None) else _v) _v = arg.pop('sizemin', None) self['sizemin'] = (sizemin if (sizemin is not None) else _v) _v = arg.pop('sizemode', None) self['sizemode'] = (sizemode if (sizemode is not None) else _v) _v = arg.pop('sizeref', None) self['sizeref'] = (sizeref if (sizeref is not None) else _v) _v = arg.pop('sizesrc', None) self['sizesrc'] = (sizesrc if (sizesrc is not None) else _v) _v = arg.pop('symbol', None) self['symbol'] = (symbol if (symbol is not None) else _v) _v = arg.pop('symbolsrc', None) self['symbolsrc'] = (symbolsrc if (symbolsrc is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
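An illustrative sketch of constructing the Marker object documented above (not part of the dataset; the numeric data are invented, and the attribute choices are just one plausible combination):
import plotly.graph_objs as go
from plotly.graph_objs.scattergeo import Marker

# Map a numeric array onto a named colorscale with explicit bounds and a colorbar.
marker = Marker(
    color=[1.0, 4.5, 9.2],    # numeric array -> colorscale mapping
    colorscale='Viridis',     # one of the palette names listed above
    cmin=0,
    cmax=10,
    showscale=True,           # draw a colorbar for this trace
    size=[10, 20, 30],
    sizemode='area',
    sizeref=0.5,
    symbol='circle',
    opacity=0.8,
)
trace = go.Scattergeo(lon=[-0.1, 31.2, -43.2], lat=[51.5, 30.0, -22.9],
                      mode='markers', marker=marker)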
@property def color(self): "\n Sets the line color.\n \n The 'color' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n\n Returns\n -------\n str\n " return self['color']
-4,622,030,073,859,431,000
Sets the line color. The 'color' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
color
Jonathan-MW/plotly.py
python
@property def color(self): "\n Sets the line color.\n \n The 'color' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n\n Returns\n -------\n str\n " return self['color']
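A short sketch (not from the dataset) of the color formats the 'color' property above accepts; any one of the strings would be a valid line color:
import plotly.graph_objs as go

hex_color   = '#ff7f0e'               # hex string
rgba_color  = 'rgba(255, 0, 0, 0.5)'  # rgb/rgba string
hsl_color   = 'hsl(120, 100%, 25%)'   # hsl/hsla string
named_color = 'darkseagreen'          # named CSS color from the list above

trace = go.Scattergeo(lon=[0, 10], lat=[0, 10], mode='lines',
                      line=dict(color=named_color))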
@property def dash(self): '\n Sets the dash style of lines. Set to a dash type string\n ("solid", "dot", "dash", "longdash", "dashdot", or\n "longdashdot") or a dash length list in px (eg\n "5px,10px,2px,2px").\n \n The \'dash\' property is an enumeration that may be specified as:\n - One of the following dash styles:\n [\'solid\', \'dot\', \'dash\', \'longdash\', \'dashdot\', \'longdashdot\']\n - A string containing a dash length list in pixels or percentages\n (e.g. \'5px 10px 2px 2px\', \'5, 10, 2, 2\', \'10% 20% 40%\', etc.)\n\n Returns\n -------\n str\n ' return self['dash']
-7,424,793,383,219,417,000
Sets the dash style of lines. Set to a dash type string ("solid", "dot", "dash", "longdash", "dashdot", or "longdashdot") or a dash length list in px (eg "5px,10px,2px,2px"). The 'dash' property is an enumeration that may be specified as: - One of the following dash styles: ['solid', 'dot', 'dash', 'longdash', 'dashdot', 'longdashdot'] - A string containing a dash length list in pixels or percentages (e.g. '5px 10px 2px 2px', '5, 10, 2, 2', '10% 20% 40%', etc.) Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
dash
Jonathan-MW/plotly.py
python
@property def dash(self): '\n Sets the dash style of lines. Set to a dash type string\n ("solid", "dot", "dash", "longdash", "dashdot", or\n "longdashdot") or a dash length list in px (eg\n "5px,10px,2px,2px").\n \n The \'dash\' property is an enumeration that may be specified as:\n - One of the following dash styles:\n [\'solid\', \'dot\', \'dash\', \'longdash\', \'dashdot\', \'longdashdot\']\n - A string containing a dash length list in pixels or percentages\n (e.g. \'5px 10px 2px 2px\', \'5, 10, 2, 2\', \'10% 20% 40%\', etc.)\n\n Returns\n -------\n str\n ' return self['dash']
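A brief sketch (not part of the dataset) showing both accepted forms of the 'dash' property: a named dash style and a custom dash length list:
import plotly.graph_objs as go

named_dash  = go.Scattergeo(lon=[0, 20], lat=[0, 20], mode='lines',
                            line=dict(dash='longdashdot'))
custom_dash = go.Scattergeo(lon=[0, 20], lat=[5, 25], mode='lines',
                            line=dict(dash='5px,10px,2px,2px'))
fig = go.Figure(data=[named_dash, custom_dash])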
@property def width(self): "\n Sets the line width (in px).\n \n The 'width' property is a number and may be specified as:\n - An int or float in the interval [0, inf]\n\n Returns\n -------\n int|float\n " return self['width']
-1,699,504,145,934,547,700
Sets the line width (in px). The 'width' property is a number and may be specified as: - An int or float in the interval [0, inf] Returns ------- int|float
plotly/graph_objs/scattergeo/__init__.py
width
Jonathan-MW/plotly.py
python
@property def width(self): "\n Sets the line width (in px).\n \n The 'width' property is a number and may be specified as:\n - An int or float in the interval [0, inf]\n\n Returns\n -------\n int|float\n " return self['width']
def __init__(self, arg=None, color=None, dash=None, width=None, **kwargs): '\n Construct a new Line object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Line\n color\n Sets the line color.\n dash\n Sets the dash style of lines. Set to a dash type string\n ("solid", "dot", "dash", "longdash", "dashdot", or\n "longdashdot") or a dash length list in px (eg\n "5px,10px,2px,2px").\n width\n Sets the line width (in px).\n\n Returns\n -------\n Line\n ' super(Line, self).__init__('line') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Line \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Line') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import line as v_line self._validators['color'] = v_line.ColorValidator() self._validators['dash'] = v_line.DashValidator() self._validators['width'] = v_line.WidthValidator() _v = arg.pop('color', None) self['color'] = (color if (color is not None) else _v) _v = arg.pop('dash', None) self['dash'] = (dash if (dash is not None) else _v) _v = arg.pop('width', None) self['width'] = (width if (width is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
8,805,425,881,314,854,000
Construct a new Line object Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Line color Sets the line color. dash Sets the dash style of lines. Set to a dash type string ("solid", "dot", "dash", "longdash", "dashdot", or "longdashdot") or a dash length list in px (eg "5px,10px,2px,2px"). width Sets the line width (in px). Returns ------- Line
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, color=None, dash=None, width=None, **kwargs): '\n Construct a new Line object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Line\n color\n Sets the line color.\n dash\n Sets the dash style of lines. Set to a dash type string\n ("solid", "dot", "dash", "longdash", "dashdot", or\n "longdashdot") or a dash length list in px (eg\n "5px,10px,2px,2px").\n width\n Sets the line width (in px).\n\n Returns\n -------\n Line\n ' super(Line, self).__init__('line') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Line \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Line') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import line as v_line self._validators['color'] = v_line.ColorValidator() self._validators['dash'] = v_line.DashValidator() self._validators['width'] = v_line.WidthValidator() _v = arg.pop('color', None) self['color'] = (color if (color is not None) else _v) _v = arg.pop('dash', None) self['dash'] = (dash if (dash is not None) else _v) _v = arg.pop('width', None) self['width'] = (width if (width is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
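An illustrative sketch (not from the dataset) of constructing the Line object documented above and attaching it to a trace; the coordinates are arbitrary sample values:
import plotly.graph_objs as go
from plotly.graph_objs.scattergeo import Line

line = Line(color='royalblue', dash='dot', width=2)
trace = go.Scattergeo(lon=[-73.6, 2.35, 139.7], lat=[45.5, 48.86, 35.7],
                      mode='lines', line=line)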
@property def align(self): "\n Sets the horizontal alignment of the text content within hover\n label box. Has an effect only if the hover label text spans\n more two or more lines\n \n The 'align' property is an enumeration that may be specified as:\n - One of the following enumeration values:\n ['left', 'right', 'auto']\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n Any|numpy.ndarray\n " return self['align']
-7,126,462,146,178,984,000
Sets the horizontal alignment of the text content within the hover label box. Has an effect only if the hover label text spans two or more lines. The 'align' property is an enumeration that may be specified as: - One of the following enumeration values: ['left', 'right', 'auto'] - A tuple, list, or one-dimensional numpy array of the above Returns ------- Any|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
align
Jonathan-MW/plotly.py
python
@property def align(self): "\n Sets the horizontal alignment of the text content within hover\n label box. Has an effect only if the hover label text spans\n more two or more lines\n \n The 'align' property is an enumeration that may be specified as:\n - One of the following enumeration values:\n ['left', 'right', 'auto']\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n Any|numpy.ndarray\n " return self['align']
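A small sketch (not part of the dataset) of when 'align' matters: the hover text below wraps onto several lines via <br>, so left-alignment is visible. The place name and figures are made up for illustration:
import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[2.35], lat=[48.86], mode='markers',
    text=['Sample city<br>value A: 12<br>value B: 345'],
    hoverinfo='text',
    hoverlabel=dict(align='left'),
)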
@property def alignsrc(self): "\n Sets the source reference on plot.ly for align .\n \n The 'alignsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['alignsrc']
3,193,618,158,748,979,700
Sets the source reference on plot.ly for align . The 'alignsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
alignsrc
Jonathan-MW/plotly.py
python
@property def alignsrc(self): "\n Sets the source reference on plot.ly for align .\n \n The 'alignsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['alignsrc']
@property def bgcolor(self): "\n Sets the background color of the hover labels for this trace\n \n The 'bgcolor' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['bgcolor']
-5,479,036,764,658,648,000
Sets the background color of the hover labels for this trace The 'bgcolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A list or array of any of the above Returns ------- str|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
bgcolor
Jonathan-MW/plotly.py
python
@property def bgcolor(self): "\n Sets the background color of the hover labels for this trace\n \n The 'bgcolor' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['bgcolor']
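A short sketch (not from the dataset) using the per-point form of 'bgcolor', giving each point's hover label its own background color:
import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[-0.1, 139.7, 151.2], lat=[51.5, 35.7, -33.9], mode='markers',
    hoverlabel=dict(bgcolor=['#ff7f0e', 'rgb(44, 160, 44)', 'steelblue']),
)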
@property def bgcolorsrc(self): "\n Sets the source reference on plot.ly for bgcolor .\n \n The 'bgcolorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['bgcolorsrc']
1,430,722,777,616,976,100
Sets the source reference on plot.ly for bgcolor . The 'bgcolorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
bgcolorsrc
Jonathan-MW/plotly.py
python
@property def bgcolorsrc(self): "\n Sets the source reference on plot.ly for bgcolor .\n \n The 'bgcolorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['bgcolorsrc']
@property def bordercolor(self): "\n Sets the border color of the hover labels for this trace.\n \n The 'bordercolor' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['bordercolor']
-6,449,970,727,678,226,000
Sets the border color of the hover labels for this trace. The 'bordercolor' property is a color and may be specified as: - A hex string (e.g. '#ff0000') - An rgb/rgba string (e.g. 'rgb(255,0,0)') - An hsl/hsla string (e.g. 'hsl(0,100%,50%)') - An hsv/hsva string (e.g. 'hsv(0,100%,100%)') - A named CSS color: aliceblue, antiquewhite, aqua, aquamarine, azure, beige, bisque, black, blanchedalmond, blue, blueviolet, brown, burlywood, cadetblue, chartreuse, chocolate, coral, cornflowerblue, cornsilk, crimson, cyan, darkblue, darkcyan, darkgoldenrod, darkgray, darkgrey, darkgreen, darkkhaki, darkmagenta, darkolivegreen, darkorange, darkorchid, darkred, darksalmon, darkseagreen, darkslateblue, darkslategray, darkslategrey, darkturquoise, darkviolet, deeppink, deepskyblue, dimgray, dimgrey, dodgerblue, firebrick, floralwhite, forestgreen, fuchsia, gainsboro, ghostwhite, gold, goldenrod, gray, grey, green, greenyellow, honeydew, hotpink, indianred, indigo, ivory, khaki, lavender, lavenderblush, lawngreen, lemonchiffon, lightblue, lightcoral, lightcyan, lightgoldenrodyellow, lightgray, lightgrey, lightgreen, lightpink, lightsalmon, lightseagreen, lightskyblue, lightslategray, lightslategrey, lightsteelblue, lightyellow, lime, limegreen, linen, magenta, maroon, mediumaquamarine, mediumblue, mediumorchid, mediumpurple, mediumseagreen, mediumslateblue, mediumspringgreen, mediumturquoise, mediumvioletred, midnightblue, mintcream, mistyrose, moccasin, navajowhite, navy, oldlace, olive, olivedrab, orange, orangered, orchid, palegoldenrod, palegreen, paleturquoise, palevioletred, papayawhip, peachpuff, peru, pink, plum, powderblue, purple, red, rosybrown, royalblue, saddlebrown, salmon, sandybrown, seagreen, seashell, sienna, silver, skyblue, slateblue, slategray, slategrey, snow, springgreen, steelblue, tan, teal, thistle, tomato, turquoise, violet, wheat, white, whitesmoke, yellow, yellowgreen - A list or array of any of the above Returns ------- str|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
bordercolor
Jonathan-MW/plotly.py
python
@property def bordercolor(self): "\n Sets the border color of the hover labels for this trace.\n \n The 'bordercolor' property is a color and may be specified as:\n - A hex string (e.g. '#ff0000')\n - An rgb/rgba string (e.g. 'rgb(255,0,0)')\n - An hsl/hsla string (e.g. 'hsl(0,100%,50%)')\n - An hsv/hsva string (e.g. 'hsv(0,100%,100%)')\n - A named CSS color:\n aliceblue, antiquewhite, aqua, aquamarine, azure,\n beige, bisque, black, blanchedalmond, blue,\n blueviolet, brown, burlywood, cadetblue,\n chartreuse, chocolate, coral, cornflowerblue,\n cornsilk, crimson, cyan, darkblue, darkcyan,\n darkgoldenrod, darkgray, darkgrey, darkgreen,\n darkkhaki, darkmagenta, darkolivegreen, darkorange,\n darkorchid, darkred, darksalmon, darkseagreen,\n darkslateblue, darkslategray, darkslategrey,\n darkturquoise, darkviolet, deeppink, deepskyblue,\n dimgray, dimgrey, dodgerblue, firebrick,\n floralwhite, forestgreen, fuchsia, gainsboro,\n ghostwhite, gold, goldenrod, gray, grey, green,\n greenyellow, honeydew, hotpink, indianred, indigo,\n ivory, khaki, lavender, lavenderblush, lawngreen,\n lemonchiffon, lightblue, lightcoral, lightcyan,\n lightgoldenrodyellow, lightgray, lightgrey,\n lightgreen, lightpink, lightsalmon, lightseagreen,\n lightskyblue, lightslategray, lightslategrey,\n lightsteelblue, lightyellow, lime, limegreen,\n linen, magenta, maroon, mediumaquamarine,\n mediumblue, mediumorchid, mediumpurple,\n mediumseagreen, mediumslateblue, mediumspringgreen,\n mediumturquoise, mediumvioletred, midnightblue,\n mintcream, mistyrose, moccasin, navajowhite, navy,\n oldlace, olive, olivedrab, orange, orangered,\n orchid, palegoldenrod, palegreen, paleturquoise,\n palevioletred, papayawhip, peachpuff, peru, pink,\n plum, powderblue, purple, red, rosybrown,\n royalblue, saddlebrown, salmon, sandybrown,\n seagreen, seashell, sienna, silver, skyblue,\n slateblue, slategray, slategrey, snow, springgreen,\n steelblue, tan, teal, thistle, tomato, turquoise,\n violet, wheat, white, whitesmoke, yellow,\n yellowgreen\n - A list or array of any of the above\n\n Returns\n -------\n str|numpy.ndarray\n " return self['bordercolor']
@property def bordercolorsrc(self): "\n Sets the source reference on plot.ly for bordercolor .\n \n The 'bordercolorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['bordercolorsrc']
7,110,519,896,490,883,000
Sets the source reference on plot.ly for bordercolor . The 'bordercolorsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
bordercolorsrc
Jonathan-MW/plotly.py
python
@property def bordercolorsrc(self): "\n Sets the source reference on plot.ly for bordercolor .\n \n The 'bordercolorsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['bordercolorsrc']
@property def font(self): '\n Sets the font used in hover labels.\n \n The \'font\' property is an instance of Font\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.hoverlabel.Font\n - A dict of string/value properties that will be passed\n to the Font constructor\n \n Supported dict properties:\n \n color\n \n colorsrc\n Sets the source reference on plot.ly for color\n .\n family\n HTML font family - the typeface that will be\n applied by the web browser. The web browser\n will only be able to apply a font if it is\n available on the system which it operates.\n Provide multiple font families, separated by\n commas, to indicate the preference in which to\n apply fonts if they aren\'t available on the\n system. The plotly service (at https://plot.ly\n or on-premise) generates images on a server,\n where only a select number of fonts are\n installed and supported. These include "Arial",\n "Balto", "Courier New", "Droid Sans",, "Droid\n Serif", "Droid Sans Mono", "Gravitas One", "Old\n Standard TT", "Open Sans", "Overpass", "PT Sans\n Narrow", "Raleway", "Times New Roman".\n familysrc\n Sets the source reference on plot.ly for\n family .\n size\n \n sizesrc\n Sets the source reference on plot.ly for size\n .\n\n Returns\n -------\n plotly.graph_objs.scattergeo.hoverlabel.Font\n ' return self['font']
-3,951,941,290,926,129,700
Sets the font used in hover labels. The 'font' property is an instance of Font that may be specified as: - An instance of plotly.graph_objs.scattergeo.hoverlabel.Font - A dict of string/value properties that will be passed to the Font constructor Supported dict properties: color colorsrc Sets the source reference on plot.ly for color. family HTML font family - the typeface that will be applied by the web browser. The web browser will only be able to apply a font if it is available on the system on which it operates. Provide multiple font families, separated by commas, to indicate the preference in which to apply fonts if they aren't available on the system. The plotly service (at https://plot.ly or on-premise) generates images on a server, where only a select number of fonts are installed and supported. These include "Arial", "Balto", "Courier New", "Droid Sans", "Droid Serif", "Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New Roman". familysrc Sets the source reference on plot.ly for family. size sizesrc Sets the source reference on plot.ly for size. Returns ------- plotly.graph_objs.scattergeo.hoverlabel.Font
plotly/graph_objs/scattergeo/__init__.py
font
Jonathan-MW/plotly.py
python
@property def font(self): '\n Sets the font used in hover labels.\n \n The \'font\' property is an instance of Font\n that may be specified as:\n - An instance of plotly.graph_objs.scattergeo.hoverlabel.Font\n - A dict of string/value properties that will be passed\n to the Font constructor\n \n Supported dict properties:\n \n color\n \n colorsrc\n Sets the source reference on plot.ly for color\n .\n family\n HTML font family - the typeface that will be\n applied by the web browser. The web browser\n will only be able to apply a font if it is\n available on the system which it operates.\n Provide multiple font families, separated by\n commas, to indicate the preference in which to\n apply fonts if they aren\'t available on the\n system. The plotly service (at https://plot.ly\n or on-premise) generates images on a server,\n where only a select number of fonts are\n installed and supported. These include "Arial",\n "Balto", "Courier New", "Droid Sans",, "Droid\n Serif", "Droid Sans Mono", "Gravitas One", "Old\n Standard TT", "Open Sans", "Overpass", "PT Sans\n Narrow", "Raleway", "Times New Roman".\n familysrc\n Sets the source reference on plot.ly for\n family .\n size\n \n sizesrc\n Sets the source reference on plot.ly for size\n .\n\n Returns\n -------\n plotly.graph_objs.scattergeo.hoverlabel.Font\n ' return self['font']
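An illustrative sketch (not part of the dataset) passing the 'font' property as a plain dict; the comma-separated family string is a browser fallback list, and the specific values are arbitrary:
import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[13.4], lat=[52.5], mode='markers', text=['sample point'],
    hoverinfo='text',
    hoverlabel=dict(
        font=dict(family='Open Sans, Arial', size=14, color='white'),
        bgcolor='black',
    ),
)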
@property def namelength(self): "\n Sets the default length (in number of characters) of the trace\n name in the hover labels for all traces. -1 shows the whole\n name regardless of length. 0-3 shows the first 0-3 characters,\n and an integer >3 will show the whole name if it is less than\n that many characters, but if it is longer, will truncate to\n `namelength - 3` characters and add an ellipsis.\n \n The 'namelength' property is a integer and may be specified as:\n - An int (or float that will be cast to an int)\n in the interval [-1, 9223372036854775807]\n - A tuple, list, or one-dimensional numpy array of the above\n\n Returns\n -------\n int|numpy.ndarray\n " return self['namelength']
2,082,155,897,422,349,300
Sets the default length (in number of characters) of the trace name in the hover labels for all traces. -1 shows the whole name regardless of length. 0-3 shows the first 0-3 characters, and an integer >3 will show the whole name if it is less than that many characters, but if it is longer, will truncate to `namelength - 3` characters and add an ellipsis. The 'namelength' property is a integer and may be specified as: - An int (or float that will be cast to an int) in the interval [-1, 9223372036854775807] - A tuple, list, or one-dimensional numpy array of the above Returns ------- int|numpy.ndarray
plotly/graph_objs/scattergeo/__init__.py
namelength
Jonathan-MW/plotly.py
python
@property def namelength(self): return self['namelength']
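A hedged sketch of the `namelength` behaviour described above: -1 keeps the full trace name in the hover label, while small positive values truncate it. The trace below is invented.

import plotly.graph_objs as go

trace = go.Scattergeo(
    lon=[0.0], lat=[0.0],
    name='a deliberately long trace name',
    hoverlabel=dict(namelength=-1),  # -1: show the whole name regardless of length
)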
@property def namelengthsrc(self): "\n Sets the source reference on plot.ly for namelength .\n \n The 'namelengthsrc' property must be specified as a string or\n as a plotly.grid_objs.Column object\n\n Returns\n -------\n str\n " return self['namelengthsrc']
4,919,275,048,233,846,000
Sets the source reference on plot.ly for namelength . The 'namelengthsrc' property must be specified as a string or as a plotly.grid_objs.Column object Returns ------- str
plotly/graph_objs/scattergeo/__init__.py
namelengthsrc
Jonathan-MW/plotly.py
python
@property def namelengthsrc(self): return self['namelengthsrc']
def __init__(self, arg=None, align=None, alignsrc=None, bgcolor=None, bgcolorsrc=None, bordercolor=None, bordercolorsrc=None, font=None, namelength=None, namelengthsrc=None, **kwargs): '\n Construct a new Hoverlabel object\n \n Parameters\n ----------\n arg\n dict of properties compatible with this constructor or\n an instance of plotly.graph_objs.scattergeo.Hoverlabel\n align\n Sets the horizontal alignment of the text content\n within hover label box. Has an effect only if the hover\n label text spans more two or more lines\n alignsrc\n Sets the source reference on plot.ly for align .\n bgcolor\n Sets the background color of the hover labels for this\n trace\n bgcolorsrc\n Sets the source reference on plot.ly for bgcolor .\n bordercolor\n Sets the border color of the hover labels for this\n trace.\n bordercolorsrc\n Sets the source reference on plot.ly for bordercolor .\n font\n Sets the font used in hover labels.\n namelength\n Sets the default length (in number of characters) of\n the trace name in the hover labels for all traces. -1\n shows the whole name regardless of length. 0-3 shows\n the first 0-3 characters, and an integer >3 will show\n the whole name if it is less than that many characters,\n but if it is longer, will truncate to `namelength - 3`\n characters and add an ellipsis.\n namelengthsrc\n Sets the source reference on plot.ly for namelength .\n\n Returns\n -------\n Hoverlabel\n ' super(Hoverlabel, self).__init__('hoverlabel') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Hoverlabel \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Hoverlabel') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import hoverlabel as v_hoverlabel self._validators['align'] = v_hoverlabel.AlignValidator() self._validators['alignsrc'] = v_hoverlabel.AlignsrcValidator() self._validators['bgcolor'] = v_hoverlabel.BgcolorValidator() self._validators['bgcolorsrc'] = v_hoverlabel.BgcolorsrcValidator() self._validators['bordercolor'] = v_hoverlabel.BordercolorValidator() self._validators['bordercolorsrc'] = v_hoverlabel.BordercolorsrcValidator() self._validators['font'] = v_hoverlabel.FontValidator() self._validators['namelength'] = v_hoverlabel.NamelengthValidator() self._validators['namelengthsrc'] = v_hoverlabel.NamelengthsrcValidator() _v = arg.pop('align', None) self['align'] = (align if (align is not None) else _v) _v = arg.pop('alignsrc', None) self['alignsrc'] = (alignsrc if (alignsrc is not None) else _v) _v = arg.pop('bgcolor', None) self['bgcolor'] = (bgcolor if (bgcolor is not None) else _v) _v = arg.pop('bgcolorsrc', None) self['bgcolorsrc'] = (bgcolorsrc if (bgcolorsrc is not None) else _v) _v = arg.pop('bordercolor', None) self['bordercolor'] = (bordercolor if (bordercolor is not None) else _v) _v = arg.pop('bordercolorsrc', None) self['bordercolorsrc'] = (bordercolorsrc if (bordercolorsrc is not None) else _v) _v = arg.pop('font', None) self['font'] = (font if (font is not None) else _v) _v = arg.pop('namelength', None) self['namelength'] = (namelength if (namelength is not None) else _v) _v = arg.pop('namelengthsrc', None) self['namelengthsrc'] = (namelengthsrc if (namelengthsrc is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
-8,548,297,225,215,319,000
Construct a new Hoverlabel object Parameters ---------- arg dict of properties compatible with this constructor or an instance of plotly.graph_objs.scattergeo.Hoverlabel align Sets the horizontal alignment of the text content within hover label box. Has an effect only if the hover label text spans more two or more lines alignsrc Sets the source reference on plot.ly for align . bgcolor Sets the background color of the hover labels for this trace bgcolorsrc Sets the source reference on plot.ly for bgcolor . bordercolor Sets the border color of the hover labels for this trace. bordercolorsrc Sets the source reference on plot.ly for bordercolor . font Sets the font used in hover labels. namelength Sets the default length (in number of characters) of the trace name in the hover labels for all traces. -1 shows the whole name regardless of length. 0-3 shows the first 0-3 characters, and an integer >3 will show the whole name if it is less than that many characters, but if it is longer, will truncate to `namelength - 3` characters and add an ellipsis. namelengthsrc Sets the source reference on plot.ly for namelength . Returns ------- Hoverlabel
plotly/graph_objs/scattergeo/__init__.py
__init__
Jonathan-MW/plotly.py
python
def __init__(self, arg=None, align=None, alignsrc=None, bgcolor=None, bgcolorsrc=None, bordercolor=None, bordercolorsrc=None, font=None, namelength=None, namelengthsrc=None, **kwargs): super(Hoverlabel, self).__init__('hoverlabel') if (arg is None): arg = {} elif isinstance(arg, self.__class__): arg = arg.to_plotly_json() elif isinstance(arg, dict): arg = _copy.copy(arg) else: raise ValueError('The first argument to the plotly.graph_objs.scattergeo.Hoverlabel \nconstructor must be a dict or \nan instance of plotly.graph_objs.scattergeo.Hoverlabel') self._skip_invalid = kwargs.pop('skip_invalid', False) from plotly.validators.scattergeo import hoverlabel as v_hoverlabel self._validators['align'] = v_hoverlabel.AlignValidator() self._validators['alignsrc'] = v_hoverlabel.AlignsrcValidator() self._validators['bgcolor'] = v_hoverlabel.BgcolorValidator() self._validators['bgcolorsrc'] = v_hoverlabel.BgcolorsrcValidator() self._validators['bordercolor'] = v_hoverlabel.BordercolorValidator() self._validators['bordercolorsrc'] = v_hoverlabel.BordercolorsrcValidator() self._validators['font'] = v_hoverlabel.FontValidator() self._validators['namelength'] = v_hoverlabel.NamelengthValidator() self._validators['namelengthsrc'] = v_hoverlabel.NamelengthsrcValidator() _v = arg.pop('align', None) self['align'] = (align if (align is not None) else _v) _v = arg.pop('alignsrc', None) self['alignsrc'] = (alignsrc if (alignsrc is not None) else _v) _v = arg.pop('bgcolor', None) self['bgcolor'] = (bgcolor if (bgcolor is not None) else _v) _v = arg.pop('bgcolorsrc', None) self['bgcolorsrc'] = (bgcolorsrc if (bgcolorsrc is not None) else _v) _v = arg.pop('bordercolor', None) self['bordercolor'] = (bordercolor if (bordercolor is not None) else _v) _v = arg.pop('bordercolorsrc', None) self['bordercolorsrc'] = (bordercolorsrc if (bordercolorsrc is not None) else _v) _v = arg.pop('font', None) self['font'] = (font if (font is not None) else _v) _v = arg.pop('namelength', None) self['namelength'] = (namelength if (namelength is not None) else _v) _v = arg.pop('namelengthsrc', None) self['namelengthsrc'] = (namelengthsrc if (namelengthsrc is not None) else _v) self._process_kwargs(**dict(arg, **kwargs)) self._skip_invalid = False
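A minimal construction sketch for the `Hoverlabel` object defined above, assuming plotly is installed; every property value is invented for illustration.

from plotly.graph_objs.scattergeo import Hoverlabel

hl = Hoverlabel(
    align='left',
    bgcolor='rgba(0, 0, 0, 0.8)',
    bordercolor='white',
    namelength=20,
    font=dict(family='Arial', size=12),  # dict is coerced by the Font validator
)
hl_dict = hl.to_plotly_json()  # serialises the set properties back to a plain dict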
def log(action, *args, **kw): '\n e.g. amo.log(amo.LOG.CREATE_ADDON, []),\n amo.log(amo.LOG.ADD_FILE_TO_VERSION, file, version)\n ' from amo import get_user, logger_log from mkt.developers.models import ActivityLog, ActivityLogAttachment, AppLog, CommentLog, GroupLog, UserLog, VersionLog from mkt.access.models import Group from mkt.webapps.models import Webapp from mkt.users.models import UserProfile from mkt.versions.models import Version user = kw.get('user', get_user()) if (not user): logger_log.warning(('Activity log called with no user: %s' % action.id)) return al = ActivityLog(user=user, action=action.id) al.arguments = args if ('details' in kw): al.details = kw['details'] al.save() if (('details' in kw) and ('comments' in al.details)): CommentLog(comments=al.details['comments'], activity_log=al).save() if ('created' in kw): al.created = kw['created'] al.save() if ('attachments' in kw): formset = kw['attachments'] storage = get_storage_class()() for form in formset: data = form.cleaned_data if ('attachment' in data): attachment = data['attachment'] storage.save(('%s/%s' % (settings.REVIEWER_ATTACHMENTS_PATH, attachment.name)), attachment) ActivityLogAttachment(activity_log=al, description=data['description'], mimetype=attachment.content_type, filepath=attachment.name).save() for arg in args: if isinstance(arg, tuple): if (arg[0] == Webapp): AppLog(addon_id=arg[1], activity_log=al).save() elif (arg[0] == Version): VersionLog(version_id=arg[1], activity_log=al).save() elif (arg[0] == UserProfile): UserLog(user_id=arg[1], activity_log=al).save() elif (arg[0] == Group): GroupLog(group_id=arg[1], activity_log=al).save() if isinstance(arg, Webapp): AppLog(addon=arg, activity_log=al).save() elif isinstance(arg, Version): VersionLog(version=arg, activity_log=al).save() elif isinstance(arg, UserProfile): UserLog(activity_log=al, user=arg).save() elif isinstance(arg, Group): GroupLog(group=arg, activity_log=al).save() UserLog(activity_log=al, user=user).save() return al
-3,370,312,568,555,220,500
e.g. amo.log(amo.LOG.CREATE_ADDON, []), amo.log(amo.LOG.ADD_FILE_TO_VERSION, file, version)
apps/amo/log.py
log
muffinresearch/zamboni
python
def log(action, *args, **kw): from amo import get_user, logger_log from mkt.developers.models import ActivityLog, ActivityLogAttachment, AppLog, CommentLog, GroupLog, UserLog, VersionLog from mkt.access.models import Group from mkt.webapps.models import Webapp from mkt.users.models import UserProfile from mkt.versions.models import Version user = kw.get('user', get_user()) if (not user): logger_log.warning(('Activity log called with no user: %s' % action.id)) return al = ActivityLog(user=user, action=action.id) al.arguments = args if ('details' in kw): al.details = kw['details'] al.save() if (('details' in kw) and ('comments' in al.details)): CommentLog(comments=al.details['comments'], activity_log=al).save() if ('created' in kw): al.created = kw['created'] al.save() if ('attachments' in kw): formset = kw['attachments'] storage = get_storage_class()() for form in formset: data = form.cleaned_data if ('attachment' in data): attachment = data['attachment'] storage.save(('%s/%s' % (settings.REVIEWER_ATTACHMENTS_PATH, attachment.name)), attachment) ActivityLogAttachment(activity_log=al, description=data['description'], mimetype=attachment.content_type, filepath=attachment.name).save() for arg in args: if isinstance(arg, tuple): if (arg[0] == Webapp): AppLog(addon_id=arg[1], activity_log=al).save() elif (arg[0] == Version): VersionLog(version_id=arg[1], activity_log=al).save() elif (arg[0] == UserProfile): UserLog(user_id=arg[1], activity_log=al).save() elif (arg[0] == Group): GroupLog(group_id=arg[1], activity_log=al).save() if isinstance(arg, Webapp): AppLog(addon=arg, activity_log=al).save() elif isinstance(arg, Version): VersionLog(version=arg, activity_log=al).save() elif isinstance(arg, UserProfile): UserLog(activity_log=al, user=arg).save() elif isinstance(arg, Group): GroupLog(group=arg, activity_log=al).save() UserLog(activity_log=al, user=user).save() return al
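The docstring above already shows the calling convention; below is only a hedged caller sketch, where the action name `amo.LOG.APPROVE_VERSION` and the caller-supplied objects are assumptions for illustration, not taken from this file.

import amo

def record_approval(webapp, version, reviewer, comments):
    # Hypothetical helper: Webapp/Version instances passed positionally create
    # AppLog/VersionLog rows, `user=` overrides get_user(), and a 'comments'
    # key in details additionally creates a CommentLog row.
    return amo.log(amo.LOG.APPROVE_VERSION, webapp, version,
                   user=reviewer, details={'comments': comments})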
def getTestMethod(vb): 'Create a test method' def testMethod(self): try: buildParseTree(vb, dialect='vb.net') except VBParserError: raise Exception(('Unable to parse ...\n%s' % vb)) return testMethod
6,359,892,563,676,321,000
Create a test method
vb2py/test/testdotnet.py
getTestMethod
ceprio/xl_vb2py
python
def getTestMethod(vb): def testMethod(self): try: buildParseTree(vb, dialect='vb.net') except VBParserError: raise Exception(('Unable to parse ...\n%s' % vb)) return testMethod
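Factories like `getTestMethod` are normally used to stamp generated methods onto a `unittest.TestCase`; here is a generic, self-contained sketch of that pattern, where the sample table and class are invented and a trivial assertion stands in for `buildParseTree`.

import unittest

def make_test(vb_source):
    def test(self):
        self.assertTrue(vb_source.strip())  # stand-in for buildParseTree(vb_source, dialect='vb.net')
    return test

class ParsingTests(unittest.TestCase):
    pass

SAMPLES = {'empty_sub': 'Sub Foo()\nEnd Sub'}
for sample_name, vb_source in SAMPLES.items():
    setattr(ParsingTests, 'test_%s' % sample_name, make_test(vb_source))

if __name__ == '__main__':
    unittest.main()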
def find_tmc_devices(vendor=None, product=None, serial_number=None, custom_match=None, **kwargs): 'Find connected USBTMC devices. See usbutil.find_devices for more info.\n\n ' def is_usbtmc(dev): if (custom_match and (not custom_match(dev))): return False return bool(find_interfaces(dev, bInterfaceClass=254, bInterfaceSubClass=3)) return find_devices(vendor, product, serial_number, is_usbtmc, **kwargs)
-3,050,417,852,705,620,000
Find connected USBTMC devices. See usbutil.find_devices for more info.
pyvisa-py/protocols/usbtmc.py
find_tmc_devices
circuitfox/pyvisa-py
python
def find_tmc_devices(vendor=None, product=None, serial_number=None, custom_match=None, **kwargs): '\n\n ' def is_usbtmc(dev): if (custom_match and (not custom_match(dev))): return False return bool(find_interfaces(dev, bInterfaceClass=254, bInterfaceSubClass=3)) return find_devices(vendor, product, serial_number, is_usbtmc, **kwargs)
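A hedged usage sketch for `find_tmc_devices` above; the vendor id is an invented example, and the result is only assumed to be iterable (it is whatever `usbutil.find_devices` returns).

# Invented vendor id; filters to USBTMC-capable interfaces of that vendor.
for dev in find_tmc_devices(vendor=0x0957):
    print(dev)

# Or enumerate every USBTMC device visible to pyusb.
for dev in find_tmc_devices():
    print(dev)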
@classmethod def from_quirky(cls, data): 'Constructs a correct response for quirky devices' (msgid, btag, btaginverse) = struct.unpack_from('BBBx', data) data = data.rstrip(b'\x00') if (';' in str(data)): (transfer_size, transfer_attributes) = struct.unpack_from('<LBxxx', data, 4) data = data[12:] else: transfer_size = 0 transfer_attributes = 1 return cls(msgid, btag, btaginverse, transfer_size, transfer_attributes, data)
-6,306,606,062,144,896,000
Constructs a correct response for quirky devices
pyvisa-py/protocols/usbtmc.py
from_quirky
circuitfox/pyvisa-py
python
@classmethod def from_quirky(cls, data): (msgid, btag, btaginverse) = struct.unpack_from('BBBx', data) data = data.rstrip(b'\x00') if (';' in str(data)): (transfer_size, transfer_attributes) = struct.unpack_from('<LBxxx', data, 4) data = data[12:] else: transfer_size = 0 transfer_attributes = 1 return cls(msgid, btag, btaginverse, transfer_size, transfer_attributes, data)
@staticmethod def build_array(btag, transfer_size, term_char=None): '\n\n :param transfer_size:\n :param btag:\n :param term_char:\n :return:\n ' if (term_char is None): transfer_attributes = 0 term_char = 0 else: transfer_attributes = 2 return (struct.pack('BBBx', MsgID.request_dev_dep_msg_in, btag, ((~ btag) & 255)) + struct.pack('<LBBxx', transfer_size, transfer_attributes, term_char))
6,151,967,743,317,589,000
:param transfer_size: :param btag: :param term_char: :return:
pyvisa-py/protocols/usbtmc.py
build_array
circuitfox/pyvisa-py
python
@staticmethod def build_array(btag, transfer_size, term_char=None): if (term_char is None): transfer_attributes = 0 term_char = 0 else: transfer_attributes = 2 return (struct.pack('BBBx', MsgID.request_dev_dep_msg_in, btag, ((~ btag) & 255)) + struct.pack('<LBBxx', transfer_size, transfer_attributes, term_char))
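The two `struct.pack` calls above assemble the 12-byte Bulk-OUT header of a REQUEST_DEV_DEP_MSG_IN transfer; below is a standalone sketch of the same layout, where the MsgID value 2 comes from the USBTMC specification rather than from this file.

import struct

btag, transfer_size, term_char = 0x01, 1024, ord('\n')
header = (struct.pack('BBBx', 2, btag, (~btag) & 255)               # MsgID, bTag, ~bTag, reserved
          + struct.pack('<LBBxx', transfer_size, 2, term_char))     # TransferSize, attributes (TermChar enabled), TermChar
assert len(header) == 12
print(struct.unpack('<BBBxLBBxx', header))  # (2, 1, 254, 1024, 2, 10)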
def write(self, data): 'Send raw bytes to the instrument.\n\n :param data: bytes to be sent to the instrument\n :type data: bytes\n ' try: return self.usb_send_ep.write(data) except usb.core.USBError as e: raise ValueError(str(e))
5,437,309,802,308,429,000
Send raw bytes to the instrument. :param data: bytes to be sent to the instrument :type data: bytes
pyvisa-py/protocols/usbtmc.py
write
circuitfox/pyvisa-py
python
def write(self, data): try: return self.usb_send_ep.write(data) except usb.core.USBError as e: raise ValueError(str(e))
def read(self, size): 'Receive raw bytes to the instrument.\n\n :param size: number of bytes to receive\n :return: received bytes\n :return type: bytes\n ' if (size <= 0): size = 1 data = array_to_bytes(self.usb_recv_ep.read(size, self.timeout)) return data
6,909,853,885,350,742,000
Receive raw bytes to the instrument. :param size: number of bytes to receive :return: received bytes :return type: bytes
pyvisa-py/protocols/usbtmc.py
read
circuitfox/pyvisa-py
python
def read(self, size): if (size <= 0): size = 1 data = array_to_bytes(self.usb_recv_ep.read(size, self.timeout)) return data
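Because the `read` above may return fewer than `size` raw bytes per call, callers typically loop until a known payload size is drained; a hedged helper sketch follows, where the session object, chunk size, and expected size are all assumptions.

def read_exactly(session, expected_size, chunk=4096):
    # Hypothetical helper built on the raw read() shown above.
    buf = b''
    while len(buf) < expected_size:
        buf += session.read(min(chunk, expected_size - len(buf)))
    return buf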
def _abort_bulk_in(self, btag): 'Request that the device abort a pending Bulk-IN operation.' abort_timeout_ms = 5000 data = self.usb_dev.ctrl_transfer(usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_ENDPOINT), Request.initiate_abort_bulk_in, btag, self.usb_recv_ep.bEndpointAddress, 2, timeout=abort_timeout_ms) if (data[0] != UsbTmcStatus.success): return self.usb_recv_ep.read(self.RECV_CHUNK, abort_timeout_ms) for retry in range(100): data = self.usb_dev.ctrl_transfer(usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_ENDPOINT), Request.check_abort_bulk_in_status, 0, self.usb_recv_ep.bEndpointAddress, 8, timeout=abort_timeout_ms) if (data[0] != UsbTmcStatus.pending): break time.sleep(0.05)
-9,008,425,439,862,839,000
Request that the device abort a pending Bulk-IN operation.
pyvisa-py/protocols/usbtmc.py
_abort_bulk_in
circuitfox/pyvisa-py
python
def _abort_bulk_in(self, btag): abort_timeout_ms = 5000 data = self.usb_dev.ctrl_transfer(usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_ENDPOINT), Request.initiate_abort_bulk_in, btag, self.usb_recv_ep.bEndpointAddress, 2, timeout=abort_timeout_ms) if (data[0] != UsbTmcStatus.success): return self.usb_recv_ep.read(self.RECV_CHUNK, abort_timeout_ms) for retry in range(100): data = self.usb_dev.ctrl_transfer(usb.util.build_request_type(usb.util.CTRL_IN, usb.util.CTRL_TYPE_CLASS, usb.util.CTRL_RECIPIENT_ENDPOINT), Request.check_abort_bulk_in_status, 0, self.usb_recv_ep.bEndpointAddress, 8, timeout=abort_timeout_ms) if (data[0] != UsbTmcStatus.pending): break time.sleep(0.05)