body (string, 26-98.2k chars) | body_hash (int64) | docstring (string, 1-16.8k chars) | path (string, 5-230 chars) | name (string, 1-96 chars) | repository_name (string, 7-89 chars) | lang (1 class: python) | body_without_docstring (string, 20-98.2k chars)
---|---|---|---|---|---|---|---
def get_triples(self, graph_type: str) -> str:
'Retrieves the contents of the specified graph as triples encoded in turtle format\n\n Parameters\n ----------\n graph_type : str\n Graph type, either "instance" or "schema".\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n str\n '
raise InterfaceError('get_triples is temporarily not available in this version')
self._check_connection()
self._validate_graph_type(graph_type)
result = requests.get(self._triples_url(graph_type), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())
return json.loads(_finish_response(result)) | -9,054,580,359,083,694,000 | Retrieves the contents of the specified graph as triples encoded in turtle format
Parameters
----------
graph_type : str
Graph type, either "instance" or "schema".
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
str | terminusdb_client/woqlclient/woqlClient.py | get_triples | terminusdb/woql-client-p | python | def get_triples(self, graph_type: str) -> str:
'Retrieves the contents of the specified graph as triples encoded in turtle format\n\n Parameters\n ----------\n graph_type : str\n Graph type, either "instance" or "schema".\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n str\n '
raise InterfaceError('get_triples is temporarily not available in this version')
self._check_connection()
self._validate_graph_type(graph_type)
result = requests.get(self._triples_url(graph_type), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())
return json.loads(_finish_response(result)) |
def update_triples(self, graph_type: str, turtle, commit_msg: str) -> None:
'Updates the contents of the specified graph with the triples encoded in turtle format Replaces the entire graph contents\n\n Parameters\n ----------\n graph_type : str\n Graph type, either "instance" or "schema".\n turtle\n Valid set of triples in Turtle format.\n commit_msg : str\n Commit message.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
raise InterfaceError('update_triples is temporarily not available in this version')
self._check_connection()
self._validate_graph_type(graph_type)
params = {'commit_info': self._generate_commit(commit_msg)}
params['turtle'] = turtle
result = requests.post(self._triples_url(graph_type), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=params, auth=self._auth())
return json.loads(_finish_response(result)) | -9,104,398,038,183,055,000 | Updates the contents of the specified graph with the triples encoded in turtle format. Replaces the entire graph contents
Parameters
----------
graph_type : str
Graph type, either "instance" or "schema".
turtle
Valid set of triples in Turtle format.
commit_msg : str
Commit message.
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | update_triples | terminusdb/woql-client-p | python | def update_triples(self, graph_type: str, turtle, commit_msg: str) -> None:
'Updates the contents of the specified graph with the triples encoded in turtle format Replaces the entire graph contents\n\n Parameters\n ----------\n graph_type : str\n Graph type, either "instance" or "schema".\n turtle\n Valid set of triples in Turtle format.\n commit_msg : str\n Commit message.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
raise InterfaceError('update_triples is temporarily not available in this version')
self._check_connection()
self._validate_graph_type(graph_type)
params = {'commit_info': self._generate_commit(commit_msg)}
params['turtle'] = turtle
result = requests.post(self._triples_url(graph_type), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=params, auth=self._auth())
return json.loads(_finish_response(result)) |
def insert_triples(self, graph_type: str, turtle, commit_msg: Optional[str]=None) -> None:
'Inserts into the specified graph with the triples encoded in turtle format.\n\n Parameters\n ----------\n graph_type : str\n Graph type, either "instance" or "schema".\n turtle\n Valid set of triples in Turtle format.\n commit_msg : str\n Commit message.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
raise InterfaceError('insert_triples is temporarily not available in this version')
self._check_connection()
self._validate_graph_type(graph_type)
params = {'commit_info': self._generate_commit(commit_msg)}
params['turtle'] = turtle
result = requests.put(self._triples_url(graph_type), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=params, auth=self._auth())
return json.loads(_finish_response(result)) | -8,400,336,033,088,321,000 | Inserts the given triples, encoded in turtle format, into the specified graph.
Parameters
----------
graph_type : str
Graph type, either "instance" or "schema".
turtle
Valid set of triples in Turtle format.
commit_msg : str
Commit message.
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | insert_triples | terminusdb/woql-client-p | python | def insert_triples(self, graph_type: str, turtle, commit_msg: Optional[str]=None) -> None:
'Inserts into the specified graph with the triples encoded in turtle format.\n\n Parameters\n ----------\n graph_type : str\n Graph type, either "instance" or "schema".\n turtle\n Valid set of triples in Turtle format.\n commit_msg : str\n Commit message.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
raise InterfaceError('insert_triples is temporarily not available in this version')
self._check_connection()
self._validate_graph_type(graph_type)
params = {'commit_info': self._generate_commit(commit_msg)}
params['turtle'] = turtle
result = requests.put(self._triples_url(graph_type), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=params, auth=self._auth())
return json.loads(_finish_response(result)) |
def query_document(self, document_template: dict, graph_type: str='instance', skip: int=0, count: Optional[int]=None, as_list: bool=False, get_data_version: bool=False, **kwargs) -> Union[(Iterable, list)]:
'Retrieves all documents that match a given document template\n\n Parameters\n ----------\n document_template : dict\n Template for the document that is being retrived\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n as_list: bool\n If the result returned as list rather than an iterator.\n get_data_version: bool\n If the data version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n Iterable\n '
self._validate_graph_type(graph_type)
self._check_connection()
payload = {'query': document_template, 'graph_type': graph_type}
payload['skip'] = skip
if (count is not None):
payload['count'] = count
add_args = ['prefixed', 'minimized', 'unfold']
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.post(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}', 'X-HTTP-Method-Override': 'GET'}, json=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return_obj = _result2stream(result)
if as_list:
return (list(return_obj), version)
else:
return (return_obj, version)
return_obj = _result2stream(_finish_response(result))
if as_list:
return list(return_obj)
else:
return return_obj | -103,947,400,020,023,520 | Retrieves all documents that match a given document template
Parameters
----------
document_template : dict
Template for the document that is being retrieved
graph_type : str, optional
Graph type, either "instance" or "schema".
as_list: bool
If True, the result is returned as a list rather than an iterator.
get_data_version: bool
If the data version of the document(s) should be obtained. If True, the method returns the result and the version as a tuple.
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
Iterable | terminusdb_client/woqlclient/woqlClient.py | query_document | terminusdb/woql-client-p | python | def query_document(self, document_template: dict, graph_type: str='instance', skip: int=0, count: Optional[int]=None, as_list: bool=False, get_data_version: bool=False, **kwargs) -> Union[(Iterable, list)]:
'Retrieves all documents that match a given document template\n\n Parameters\n ----------\n document_template : dict\n Template for the document that is being retrived\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n as_list: bool\n If the result returned as list rather than an iterator.\n get_data_version: bool\n If the data version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n Iterable\n '
self._validate_graph_type(graph_type)
self._check_connection()
payload = {'query': document_template, 'graph_type': graph_type}
payload['skip'] = skip
if (count is not None):
payload['count'] = count
add_args = ['prefixed', 'minimized', 'unfold']
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.post(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}', 'X-HTTP-Method-Override': 'GET'}, json=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return_obj = _result2stream(result)
if as_list:
return (list(return_obj), version)
else:
return (return_obj, version)
return_obj = _result2stream(_finish_response(result))
if as_list:
return list(return_obj)
else:
return return_obj |
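A minimal usage sketch for query_document follows; it is editorial illustration, not part of the dataset row above. The server URL, the database name "mydb" and the document class "Person" are assumptions, and the connected client object is reused by the later sketches in this section.

from terminusdb_client import WOQLClient

# Hypothetical setup: server URL and database name are assumptions for illustration.
client = WOQLClient("http://localhost:6363")
client.connect(db="mydb")

# The template is a partial document: match every Person whose name is "Alice".
matches = client.query_document({"@type": "Person", "name": "Alice"}, as_list=True)
for doc in matches:
    print(doc["@id"])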
def get_document(self, iri_id: str, graph_type: str='instance', get_data_version: bool=False, **kwargs) -> dict:
'Retrieves the document of the iri_id\n\n Parameters\n ----------\n iri_id : str\n Iri id for the docuemnt that is retriving\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n get_data_version: bool\n If the data version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n kwargs:\n Additional boolean flags for retriving. Currently avaliable: "prefixed", "minimized", "unfold"\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n dict\n '
self._validate_graph_type(graph_type)
add_args = ['prefixed', 'minimized', 'unfold']
self._check_connection()
payload = {'id': iri_id, 'graph_type': graph_type}
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.get(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return (json.loads(result), version)
return json.loads(_finish_response(result)) | -6,171,475,609,696,768,000 | Retrieves the document of the iri_id
Parameters
----------
iri_id : str
IRI id of the document that is being retrieved
graph_type : str, optional
Graph type, either "instance" or "schema".
get_data_version: bool
If the data version of the document(s) should be obtained. If True, the method returns the result and the version as a tuple.
kwargs:
Additional boolean flags for retrieving. Currently available: "prefixed", "minimized", "unfold"
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
dict | terminusdb_client/woqlclient/woqlClient.py | get_document | terminusdb/woql-client-p | python | def get_document(self, iri_id: str, graph_type: str='instance', get_data_version: bool=False, **kwargs) -> dict:
'Retrieves the document of the iri_id\n\n Parameters\n ----------\n iri_id : str\n Iri id for the docuemnt that is retriving\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n get_data_version: bool\n If the data version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n kwargs:\n Additional boolean flags for retriving. Currently avaliable: "prefixed", "minimized", "unfold"\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n dict\n '
self._validate_graph_type(graph_type)
add_args = ['prefixed', 'minimized', 'unfold']
self._check_connection()
payload = {'id': iri_id, 'graph_type': graph_type}
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.get(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return (json.loads(result), version)
return json.loads(_finish_response(result)) |
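A short sketch of get_document, reusing the client assumed in the query_document sketch above; the document id "Person/alice" is hypothetical.

# Fetch one document by id (the id is hypothetical).
doc = client.get_document("Person/alice")

# Fetch it together with its data version for later conflict checks.
doc, version = client.get_document("Person/alice", get_data_version=True)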
def get_documents_by_type(self, doc_type: str, graph_type: str='instance', skip: int=0, count: Optional[int]=None, as_list: bool=False, get_data_version=False, **kwargs) -> Union[(Iterable, list)]:
'Retrieves the documents by type\n\n Parameters\n ----------\n doc_type : str\n Specific type for the docuemnts that is retriving\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n skip: int\n The starting posiion of the returning results, default to be 0\n count: int or None\n The maximum number of returned result, if None (default) it will return all of the avalible result.\n as_list: bool\n If the result returned as list rather than an iterator.\n get_data_version: bool\n If the version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n kwargs:\n Additional boolean flags for retriving. Currently avaliable: "prefixed", "unfold"\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n iterable\n Stream of dictionaries\n '
self._validate_graph_type(graph_type)
add_args = ['prefixed', 'unfold']
self._check_connection()
payload = {'type': doc_type, 'graph_type': graph_type}
payload['skip'] = skip
if (count is not None):
payload['count'] = count
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.get(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return_obj = _result2stream(result)
if as_list:
return (list(return_obj), version)
else:
return (return_obj, version)
return_obj = _result2stream(_finish_response(result))
if as_list:
return list(return_obj)
else:
return return_obj | 1,406,150,879,367,926,300 | Retrieves the documents by type
Parameters
----------
doc_type : str
Specific type of the documents that are being retrieved
graph_type : str, optional
Graph type, either "instance" or "schema".
skip: int
The starting position of the returned results, defaults to 0
count: int or None
The maximum number of returned results; if None (default) all of the available results are returned.
as_list: bool
If True, the result is returned as a list rather than an iterator.
get_data_version: bool
If the version of the document(s) should be obtained. If True, the method returns the result and the version as a tuple.
kwargs:
Additional boolean flags for retrieving. Currently available: "prefixed", "unfold"
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
iterable
Stream of dictionaries | terminusdb_client/woqlclient/woqlClient.py | get_documents_by_type | terminusdb/woql-client-p | python | def get_documents_by_type(self, doc_type: str, graph_type: str='instance', skip: int=0, count: Optional[int]=None, as_list: bool=False, get_data_version=False, **kwargs) -> Union[(Iterable, list)]:
'Retrieves the documents by type\n\n Parameters\n ----------\n doc_type : str\n Specific type for the docuemnts that is retriving\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n skip: int\n The starting posiion of the returning results, default to be 0\n count: int or None\n The maximum number of returned result, if None (default) it will return all of the avalible result.\n as_list: bool\n If the result returned as list rather than an iterator.\n get_data_version: bool\n If the version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n kwargs:\n Additional boolean flags for retriving. Currently avaliable: "prefixed", "unfold"\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n iterable\n Stream of dictionaries\n '
self._validate_graph_type(graph_type)
add_args = ['prefixed', 'unfold']
self._check_connection()
payload = {'type': doc_type, 'graph_type': graph_type}
payload['skip'] = skip
if (count is not None):
payload['count'] = count
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.get(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return_obj = _result2stream(result)
if as_list:
return (list(return_obj), version)
else:
return (return_obj, version)
return_obj = _result2stream(_finish_response(result))
if as_list:
return list(return_obj)
else:
return return_obj |
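A sketch of get_documents_by_type under the same assumptions as the earlier sketches (connected client, hypothetical Person class).

# Stream at most 100 Person documents, starting from the first one.
for doc in client.get_documents_by_type("Person", skip=0, count=100):
    print(doc["@id"])

# Or materialise all of them as a list in one call.
people = client.get_documents_by_type("Person", as_list=True)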
def get_all_documents(self, graph_type: str='instance', skip: int=0, count: Optional[int]=None, as_list: bool=False, get_data_version: bool=False, **kwargs) -> Union[(Iterable, list, tuple)]:
'Retrieves all avalibale the documents\n\n Parameters\n ----------\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n skip: int\n The starting posiion of the returning results, default to be 0\n count: int or None\n The maximum number of returned result, if None (default) it will return all of the avalible result.\n as_list: bool\n If the result returned as list rather than an iterator.\n get_data_version: bool\n If the version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n kwargs:\n Additional boolean flags for retriving. Currently avaliable: "prefixed", "unfold"\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n iterable\n Stream of dictionaries\n '
self._validate_graph_type(graph_type)
add_args = ['prefixed', 'unfold']
self._check_connection()
payload = {'graph_type': graph_type}
payload['skip'] = skip
if (count is not None):
payload['count'] = count
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.get(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return_obj = _result2stream(result)
if as_list:
return (list(return_obj), version)
else:
return (return_obj, version)
return_obj = _result2stream(_finish_response(result))
if as_list:
return list(return_obj)
else:
return return_obj | 949,907,830,305,204,700 | Retrieves all available documents
Parameters
----------
graph_type : str, optional
Graph type, either "instance" or "schema".
skip: int
The starting position of the returned results, defaults to 0
count: int or None
The maximum number of returned results; if None (default) all of the available results are returned.
as_list: bool
If True, the result is returned as a list rather than an iterator.
get_data_version: bool
If the version of the document(s) should be obtained. If True, the method returns the result and the version as a tuple.
kwargs:
Additional boolean flags for retrieving. Currently available: "prefixed", "unfold"
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
iterable
Stream of dictionaries | terminusdb_client/woqlclient/woqlClient.py | get_all_documents | terminusdb/woql-client-p | python | def get_all_documents(self, graph_type: str='instance', skip: int=0, count: Optional[int]=None, as_list: bool=False, get_data_version: bool=False, **kwargs) -> Union[(Iterable, list, tuple)]:
'Retrieves all avalibale the documents\n\n Parameters\n ----------\n graph_type : str, optional\n Graph type, either "instance" or "schema".\n skip: int\n The starting posiion of the returning results, default to be 0\n count: int or None\n The maximum number of returned result, if None (default) it will return all of the avalible result.\n as_list: bool\n If the result returned as list rather than an iterator.\n get_data_version: bool\n If the version of the document(s) should be obtained. If True, the method return the result and the version as a tuple.\n kwargs:\n Additional boolean flags for retriving. Currently avaliable: "prefixed", "unfold"\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n iterable\n Stream of dictionaries\n '
self._validate_graph_type(graph_type)
add_args = ['prefixed', 'unfold']
self._check_connection()
payload = {'graph_type': graph_type}
payload['skip'] = skip
if (count is not None):
payload['count'] = count
for the_arg in add_args:
if (the_arg in kwargs):
payload[the_arg] = kwargs[the_arg]
result = requests.get(self._documents_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=payload, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
return_obj = _result2stream(result)
if as_list:
return (list(return_obj), version)
else:
return (return_obj, version)
return_obj = _result2stream(_finish_response(result))
if as_list:
return list(return_obj)
else:
return return_obj |
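A sketch of get_all_documents under the same assumptions as the earlier sketches.

# Iterate over every instance document in the database.
for doc in client.get_all_documents():
    print(doc["@id"])

# Schema documents can be listed the same way.
schema_docs = client.get_all_documents(graph_type="schema", as_list=True)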
def get_existing_classes(self):
'Get all the existing classes (only ids) in a database.'
all_existing_obj = self.get_all_documents(graph_type='schema')
all_existing_class = {}
for item in all_existing_obj:
if item.get('@id'):
all_existing_class[item['@id']] = item
return all_existing_class | 2,001,867,462,390,230,800 | Get all the existing classes (only ids) in a database. | terminusdb_client/woqlclient/woqlClient.py | get_existing_classes | terminusdb/woql-client-p | python | def get_existing_classes(self):
all_existing_obj = self.get_all_documents(graph_type='schema')
all_existing_class = {}
for item in all_existing_obj:
if item.get('@id'):
all_existing_class[item['@id']] = item
return all_existing_class |
def insert_document(self, document: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], graph_type: str='instance', full_replace: bool=False, commit_msg: Optional[str]=None, last_data_version: Optional[str]=None, compress: Union[(str, int)]=1024) -> None:
'Inserts the specified document(s)\n\n Parameters\n ----------\n document: dict or list of dict\n Document(s) to be inserted.\n graph_type : str\n Graph type, either "inference", "instance" or "schema".\n full_replace:: bool\n If True then the whole graph will be replaced. WARNING: you should also supply the context object as the first element in the list of documents if using this option.\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n compress : str or int\n If it is an integer, size of the data larger than this (in bytes) will be compress with gzip in the request (assume encoding as UTF-8, 0 = always compress). If it is `never` it will never compress the data.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n list\n list of ids of the inseted docuemnts\n '
self._validate_graph_type(graph_type)
self._check_connection()
params = self._generate_commit(commit_msg)
params['graph_type'] = graph_type
if full_replace:
params['full_replace'] = 'true'
else:
params['full_replace'] = 'false'
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
new_doc = self._convert_dcoument(document, graph_type)
if (len(new_doc) == 0):
return
elif (not isinstance(new_doc, list)):
new_doc = [new_doc]
if full_replace:
if (new_doc[0].get('@type') != '@context'):
raise ValueError('The first item in document needs to be a dictionary representing the context object.')
elif (new_doc[0].get('@type') == '@context'):
warnings.warn('To replace context, need to use `full_replace` or `replace_document`, skipping context object now.')
new_doc.pop(0)
json_string = json.dumps(new_doc).encode('utf-8')
if ((compress != 'never') and (len(json_string) > compress)):
headers.update({'Content-Encoding': 'gzip', 'Content-Type': 'application/json'})
result = requests.post(self._documents_url(), headers=headers, params=params, data=gzip.compress(json_string), auth=self._auth())
else:
result = requests.post(self._documents_url(), headers=headers, params=params, json=new_doc, auth=self._auth())
result = json.loads(_finish_response(result))
if isinstance(document, list):
for (idx, item) in enumerate(document):
if (hasattr(item, '_obj_to_dict') and (not hasattr(item, '_backend_id'))):
item._backend_id = result[idx][len('terminusdb:///data/'):]
return result | 4,374,205,126,363,328,000 | Inserts the specified document(s)
Parameters
----------
document: dict or list of dict
Document(s) to be inserted.
graph_type : str
Graph type, either "inference", "instance" or "schema".
full_replace : bool
If True then the whole graph will be replaced. WARNING: you should also supply the context object as the first element in the list of documents if using this option.
commit_msg : str
Commit message.
last_data_version : str
Last version before the update, used to check if the document has been changed unknowingly
compress : str or int
If it is an integer, data larger than this size (in bytes) will be compressed with gzip in the request (assuming UTF-8 encoding, 0 = always compress). If it is `never` the data will never be compressed.
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
list
list of ids of the inserted documents | terminusdb_client/woqlclient/woqlClient.py | insert_document | terminusdb/woql-client-p | python | def insert_document(self, document: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], graph_type: str='instance', full_replace: bool=False, commit_msg: Optional[str]=None, last_data_version: Optional[str]=None, compress: Union[(str, int)]=1024) -> None:
'Inserts the specified document(s)\n\n Parameters\n ----------\n document: dict or list of dict\n Document(s) to be inserted.\n graph_type : str\n Graph type, either "inference", "instance" or "schema".\n full_replace:: bool\n If True then the whole graph will be replaced. WARNING: you should also supply the context object as the first element in the list of documents if using this option.\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n compress : str or int\n If it is an integer, size of the data larger than this (in bytes) will be compress with gzip in the request (assume encoding as UTF-8, 0 = always compress). If it is `never` it will never compress the data.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n list\n list of ids of the inseted docuemnts\n '
self._validate_graph_type(graph_type)
self._check_connection()
params = self._generate_commit(commit_msg)
params['graph_type'] = graph_type
if full_replace:
params['full_replace'] = 'true'
else:
params['full_replace'] = 'false'
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
new_doc = self._convert_dcoument(document, graph_type)
if (len(new_doc) == 0):
return
elif (not isinstance(new_doc, list)):
new_doc = [new_doc]
if full_replace:
if (new_doc[0].get('@type') != '@context'):
raise ValueError('The first item in document needs to be a dictionary representing the context object.')
elif (new_doc[0].get('@type') == '@context'):
warnings.warn('To replace context, need to use `full_replace` or `replace_document`, skipping context object now.')
new_doc.pop(0)
json_string = json.dumps(new_doc).encode('utf-8')
if ((compress != 'never') and (len(json_string) > compress)):
headers.update({'Content-Encoding': 'gzip', 'Content-Type': 'application/json'})
result = requests.post(self._documents_url(), headers=headers, params=params, data=gzip.compress(json_string), auth=self._auth())
else:
result = requests.post(self._documents_url(), headers=headers, params=params, json=new_doc, auth=self._auth())
result = json.loads(_finish_response(result))
if isinstance(document, list):
for (idx, item) in enumerate(document):
if (hasattr(item, '_obj_to_dict') and (not hasattr(item, '_backend_id'))):
item._backend_id = result[idx][len('terminusdb:///data/'):]
return result |
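A sketch of insert_document under the same assumptions; the Person documents are illustrative plain dicts.

# Insert two documents in a single commit; the returned list contains their ids.
new_ids = client.insert_document(
    [
        {"@type": "Person", "name": "Alice"},
        {"@type": "Person", "name": "Bob"},
    ],
    commit_msg="Add Alice and Bob",
)
print(new_ids)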
def replace_document(self, document: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], graph_type: str='instance', commit_msg: Optional[str]=None, last_data_version: Optional[str]=None, compress: Union[(str, int)]=1024, create: bool=False) -> None:
'Updates the specified document(s)\n\n Parameters\n ----------\n document: dict or list of dict\n Document(s) to be updated.\n graph_type : str\n Graph type, either "instance" or "schema".\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n compress : str or int\n If it is an integer, size of the data larger than this (in bytes) will be compress with gzip in the request (assume encoding as UTF-8, 0 = always compress). If it is `never` it will never compress the data.\n create : bool\n Create the document if it does not yet exist.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._validate_graph_type(graph_type)
self._check_connection()
params = self._generate_commit(commit_msg)
params['graph_type'] = graph_type
params['create'] = ('true' if create else 'false')
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
new_doc = self._convert_dcoument(document, graph_type)
json_string = json.dumps(new_doc).encode('utf-8')
if ((compress != 'never') and (len(json_string) > compress)):
headers.update({'Content-Encoding': 'gzip', 'Content-Type': 'application/json'})
result = requests.put(self._documents_url(), headers=headers, params=params, data=gzip.compress(json_string), auth=self._auth())
else:
result = requests.put(self._documents_url(), headers=headers, params=params, json=new_doc, auth=self._auth())
result = json.loads(_finish_response(result))
if isinstance(document, list):
for (idx, item) in enumerate(document):
if (hasattr(item, '_obj_to_dict') and (not hasattr(item, '_backend_id'))):
item._backend_id = result[idx][len('terminusdb:///data/'):]
return result | -4,453,975,394,859,476,500 | Updates the specified document(s)
Parameters
----------
document: dict or list of dict
Document(s) to be updated.
graph_type : str
Graph type, either "instance" or "schema".
commit_msg : str
Commit message.
last_data_version : str
Last version before the update, used to check if the document has been changed unknowingly
compress : str or int
If it is an integer, data larger than this size (in bytes) will be compressed with gzip in the request (assuming UTF-8 encoding, 0 = always compress). If it is `never` the data will never be compressed.
create : bool
Create the document if it does not yet exist.
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | replace_document | terminusdb/woql-client-p | python | def replace_document(self, document: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], graph_type: str='instance', commit_msg: Optional[str]=None, last_data_version: Optional[str]=None, compress: Union[(str, int)]=1024, create: bool=False) -> None:
'Updates the specified document(s)\n\n Parameters\n ----------\n document: dict or list of dict\n Document(s) to be updated.\n graph_type : str\n Graph type, either "instance" or "schema".\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n compress : str or int\n If it is an integer, size of the data larger than this (in bytes) will be compress with gzip in the request (assume encoding as UTF-8, 0 = always compress). If it is `never` it will never compress the data.\n create : bool\n Create the document if it does not yet exist.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._validate_graph_type(graph_type)
self._check_connection()
params = self._generate_commit(commit_msg)
params['graph_type'] = graph_type
params['create'] = ('true' if create else 'false')
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
new_doc = self._convert_dcoument(document, graph_type)
json_string = json.dumps(new_doc).encode('utf-8')
if ((compress != 'never') and (len(json_string) > compress)):
headers.update({'Content-Encoding': 'gzip', 'Content-Type': 'application/json'})
result = requests.put(self._documents_url(), headers=headers, params=params, data=gzip.compress(json_string), auth=self._auth())
else:
result = requests.put(self._documents_url(), headers=headers, params=params, json=new_doc, auth=self._auth())
result = json.loads(_finish_response(result))
if isinstance(document, list):
for (idx, item) in enumerate(document):
if (hasattr(item, '_obj_to_dict') and (not hasattr(item, '_backend_id'))):
item._backend_id = result[idx][len('terminusdb:///data/'):]
return result |
def update_document(self, document: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], graph_type: str='instance', commit_msg: Optional[str]=None, last_data_version: Optional[str]=None, compress: Union[(str, int)]=1024) -> None:
'Updates the specified document(s). Add the document if not existed.\n\n Parameters\n ----------\n document: dict or list of dict\n Document(s) to be updated.\n graph_type : str\n Graph type, either "instance" or "schema".\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n compress : str or int\n If it is an integer, size of the data larger than this (in bytes) will be compress with gzip in the request (assume encoding as UTF-8, 0 = always compress). If it is `never` it will never compress the data.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self.replace_document(document, graph_type, commit_msg, last_data_version, compress, True) | 4,663,193,108,761,359,000 | Updates the specified document(s). Adds the document if it does not exist.
Parameters
----------
document: dict or list of dict
Document(s) to be updated.
graph_type : str
Graph type, either "instance" or "schema".
commit_msg : str
Commit message.
last_data_version : str
Last version before the update, used to check if the document has been changed unknowingly
compress : str or int
If it is an integer, data larger than this size (in bytes) will be compressed with gzip in the request (assuming UTF-8 encoding, 0 = always compress). If it is `never` the data will never be compressed.
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | update_document | terminusdb/woql-client-p | python | def update_document(self, document: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], graph_type: str='instance', commit_msg: Optional[str]=None, last_data_version: Optional[str]=None, compress: Union[(str, int)]=1024) -> None:
'Updates the specified document(s). Add the document if not existed.\n\n Parameters\n ----------\n document: dict or list of dict\n Document(s) to be updated.\n graph_type : str\n Graph type, either "instance" or "schema".\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n compress : str or int\n If it is an integer, size of the data larger than this (in bytes) will be compress with gzip in the request (assume encoding as UTF-8, 0 = always compress). If it is `never` it will never compress the data.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self.replace_document(document, graph_type, commit_msg, last_data_version, compress, True) |
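A sketch of update_document under the same assumptions; because it delegates to replace_document with create=True, the document is created if it does not yet exist.

# Read, modify and write back a document (the id is hypothetical).
doc = client.get_document("Person/alice")
doc["name"] = "Alice Smith"
client.update_document(doc, commit_msg="Rename Alice")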
def delete_document(self, document: Union[(str, list, dict, Iterable)], graph_type: str='instance', commit_msg: Optional[str]=None, last_data_version: Optional[str]=None) -> None:
'Delete the specified document(s)\n\n Parameters\n ----------\n document: str or list of str\n Document(s) (as dictionary or DocumentTemplate objects) or id(s) of document(s) to be updated.\n graph_type : str\n Graph type, either "instance" or "schema".\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._validate_graph_type(graph_type)
self._check_connection()
doc_id = []
if ((not isinstance(document, (str, list, dict))) and hasattr(document, '__iter__')):
document = list(document)
if (not isinstance(document, list)):
document = [document]
for doc in document:
if hasattr(doc, '_obj_to_dict'):
doc = doc._obj_to_dict()
if (isinstance(doc, dict) and doc.get('@id')):
doc_id.append(doc.get('@id'))
elif isinstance(doc, str):
doc_id.append(doc)
params = self._generate_commit(commit_msg)
params['graph_type'] = graph_type
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
_finish_response(requests.delete(self._documents_url(), headers=headers, params=params, json=doc_id, auth=self._auth())) | -5,628,106,040,500,408,000 | Delete the specified document(s)
Parameters
----------
document: str or list of str
Document(s) (as dictionary or DocumentTemplate objects) or id(s) of document(s) to be deleted.
graph_type : str
Graph type, either "instance" or "schema".
commit_msg : str
Commit message.
last_data_version : str
Last version before the update, used to check if the document has been changed unknowingly
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | delete_document | terminusdb/woql-client-p | python | def delete_document(self, document: Union[(str, list, dict, Iterable)], graph_type: str='instance', commit_msg: Optional[str]=None, last_data_version: Optional[str]=None) -> None:
'Delete the specified document(s)\n\n Parameters\n ----------\n document: str or list of str\n Document(s) (as dictionary or DocumentTemplate objects) or id(s) of document(s) to be updated.\n graph_type : str\n Graph type, either "instance" or "schema".\n commit_msg : str\n Commit message.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._validate_graph_type(graph_type)
self._check_connection()
doc_id = []
if ((not isinstance(document, (str, list, dict))) and hasattr(document, '__iter__')):
document = list(document)
if (not isinstance(document, list)):
document = [document]
for doc in document:
if hasattr(doc, '_obj_to_dict'):
doc = doc._obj_to_dict()
if (isinstance(doc, dict) and doc.get('@id')):
doc_id.append(doc.get('@id'))
elif isinstance(doc, str):
doc_id.append(doc)
params = self._generate_commit(commit_msg)
params['graph_type'] = graph_type
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
_finish_response(requests.delete(self._documents_url(), headers=headers, params=params, json=doc_id, auth=self._auth())) |
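A sketch of delete_document under the same assumptions; the ids are hypothetical.

# Delete by id, or by passing document dicts that carry an "@id".
client.delete_document("Person/alice", commit_msg="Remove Alice")
client.delete_document([{"@id": "Person/bob"}], commit_msg="Remove Bob")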
def has_doc(self, doc_id: str, graph_type: str='instance') -> bool:
'Check if a certain document exist in a database\n\n Parameters\n ----------\n doc_id: str\n Id of document to be checked.\n graph_type : str\n Graph type, either "instance" or "schema".\n\n returns\n -------\n Bool\n if the document exist\n '
self._validate_graph_type(graph_type)
self._check_connection()
all_existing_obj = self.get_all_documents(graph_type=graph_type)
all_existing_id = list(map((lambda x: x.get('@id')), all_existing_obj))
return (doc_id in all_existing_id) | -5,075,576,481,415,718,000 | Check if a certain document exists in a database
Parameters
----------
doc_id: str
Id of document to be checked.
graph_type : str
Graph type, either "instance" or "schema".
returns
-------
Bool
if the document exists | terminusdb_client/woqlclient/woqlClient.py | has_doc | terminusdb/woql-client-p | python | def has_doc(self, doc_id: str, graph_type: str='instance') -> bool:
'Check if a certain document exist in a database\n\n Parameters\n ----------\n doc_id: str\n Id of document to be checked.\n graph_type : str\n Graph type, either "instance" or "schema".\n\n returns\n -------\n Bool\n if the document exist\n '
self._validate_graph_type(graph_type)
self._check_connection()
all_existing_obj = self.get_all_documents(graph_type=graph_type)
all_existing_id = list(map((lambda x: x.get('@id')), all_existing_obj))
return (doc_id in all_existing_id) |
def get_class_frame(self, class_name):
'Get the frame of the class of class_name. Provide information about all the avaliable properties of that class.\n\n Parameters\n ----------\n class_name: str\n Name of the class\n\n returns\n -------\n dict\n Dictionary containing information\n '
self._check_connection()
opts = {'type': class_name}
result = requests.get(self._class_frame_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=opts, auth=self._auth())
return json.loads(_finish_response(result)) | -2,056,334,198,750,349,600 | Get the frame of the class of class_name. Provides information about all the available properties of that class.
Parameters
----------
class_name: str
Name of the class
returns
-------
dict
Dictionary containing information | terminusdb_client/woqlclient/woqlClient.py | get_class_frame | terminusdb/woql-client-p | python | def get_class_frame(self, class_name):
'Get the frame of the class of class_name. Provide information about all the avaliable properties of that class.\n\n Parameters\n ----------\n class_name: str\n Name of the class\n\n returns\n -------\n dict\n Dictionary containing information\n '
self._check_connection()
opts = {'type': class_name}
result = requests.get(self._class_frame_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, params=opts, auth=self._auth())
return json.loads(_finish_response(result)) |
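A sketch of get_class_frame under the same assumptions.

# Inspect the properties exposed by the hypothetical Person class.
frame = client.get_class_frame("Person")
print(frame)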
def commit(self):
'Not implemented: open transactions are currently not supported. Please check back later.' | 4,602,813,655,849,661,000 | Not implemented: open transactions are currently not supported. Please check back later. | terminusdb_client/woqlclient/woqlClient.py | commit | terminusdb/woql-client-p | python | def commit(self):
|
def query(self, woql_query: Union[(dict, WOQLQuery)], commit_msg: Optional[str]=None, get_data_version: bool=False, last_data_version: Optional[str]=None) -> Union[(dict, str)]:
'Updates the contents of the specified graph with the triples encoded in turtle format Replaces the entire graph contents\n\n Parameters\n ----------\n woql_query : dict or WOQLQuery object\n A woql query as an object or dict\n commit_mg : str\n A message that will be written to the commit log to describe the change\n get_data_version: bool\n If the data version of the query result(s) should be obtained. If True, the method return the result and the version as a tuple.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n file_dict: **deprecated**\n File dictionary to be associated with post name => filename, for multipart POST\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Examples\n -------\n >>> WOQLClient(server="http://localhost:6363").query(woql, "updating graph")\n\n Returns\n -------\n dict\n '
self._check_connection()
query_obj = {'commit_info': self._generate_commit(commit_msg)}
if isinstance(woql_query, WOQLQuery):
request_woql_query = woql_query.to_dict()
else:
request_woql_query = woql_query
query_obj['query'] = request_woql_query
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
result = requests.post(self._query_url(), headers=headers, json=query_obj, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
result = json.loads(result)
else:
result = json.loads(_finish_response(result))
if (result.get('inserts') or result.get('deletes')):
return 'Commit successfully made.'
elif get_data_version:
return (result, version)
else:
return result | 4,067,560,774,186,981,000 | Executes a WOQL query against the current database, committing any changes it makes
Parameters
----------
woql_query : dict or WOQLQuery object
A woql query as an object or dict
commit_msg : str
A message that will be written to the commit log to describe the change
get_data_version: bool
If the data version of the query result(s) should be obtained. If True, the method returns the result and the version as a tuple.
last_data_version : str
Last version before the update, used to check if the document has been changed unknowingly
file_dict: **deprecated**
File dictionary to be associated with post name => filename, for multipart POST
Raises
------
InterfaceError
if the client does not connect to a database
Examples
-------
>>> WOQLClient(server="http://localhost:6363").query(woql, "updating graph")
Returns
-------
dict | terminusdb_client/woqlclient/woqlClient.py | query | terminusdb/woql-client-p | python | def query(self, woql_query: Union[(dict, WOQLQuery)], commit_msg: Optional[str]=None, get_data_version: bool=False, last_data_version: Optional[str]=None) -> Union[(dict, str)]:
'Updates the contents of the specified graph with the triples encoded in turtle format Replaces the entire graph contents\n\n Parameters\n ----------\n woql_query : dict or WOQLQuery object\n A woql query as an object or dict\n commit_mg : str\n A message that will be written to the commit log to describe the change\n get_data_version: bool\n If the data version of the query result(s) should be obtained. If True, the method return the result and the version as a tuple.\n last_data_version : str\n Last version before the update, used to check if the document has been changed unknowingly\n file_dict: **deprecated**\n File dictionary to be associated with post name => filename, for multipart POST\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Examples\n -------\n >>> WOQLClient(server="http://localhost:6363").query(woql, "updating graph")\n\n Returns\n -------\n dict\n '
self._check_connection()
query_obj = {'commit_info': self._generate_commit(commit_msg)}
if isinstance(woql_query, WOQLQuery):
request_woql_query = woql_query.to_dict()
else:
request_woql_query = woql_query
query_obj['query'] = request_woql_query
headers = {'user-agent': f'terminusdb-client-python/{__version__}'}
if (last_data_version is not None):
headers['TerminusDB-Data-Version'] = last_data_version
result = requests.post(self._query_url(), headers=headers, json=query_obj, auth=self._auth())
if get_data_version:
(result, version) = _finish_response(result, get_data_version)
result = json.loads(result)
else:
result = json.loads(_finish_response(result))
if (result.get('inserts') or result.get('deletes')):
return 'Commit successfully made.'
elif get_data_version:
return (result, version)
else:
return result |
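A sketch of query under the same assumptions. Per the code above, a read query returns the raw result dictionary, while a query that inserts or deletes data needs a commit message and returns the string "Commit successfully made."; the WOQLQuery triple pattern below is illustrative.

from terminusdb_client import WOQLQuery

# Read query: bind every subject/predicate/object triple to variables.
result = client.query(WOQLQuery().triple("v:Subject", "v:Predicate", "v:Object"))
for binding in result["bindings"]:
    print(binding)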
def create_branch(self, new_branch_id: str, empty: bool=False) -> None:
'Create a branch starting from the current branch.\n\n Parameters\n ----------\n new_branch_id : str\n New branch identifier.\n empty : bool\n Create an empty branch if true (no starting commit)\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._check_connection()
if empty:
source = {}
elif self.ref:
source = {'origin': f'{self.team}/{self.db}/{self.repo}/commit/{self.ref}'}
else:
source = {'origin': f'{self.team}/{self.db}/{self.repo}/branch/{self.branch}'}
_finish_response(requests.post(self._branch_url(new_branch_id), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=source, auth=self._auth())) | -8,907,302,686,537,837,000 | Create a branch starting from the current branch.
Parameters
----------
new_branch_id : str
New branch identifier.
empty : bool
Create an empty branch if true (no starting commit)
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | create_branch | terminusdb/woql-client-p | python | def create_branch(self, new_branch_id: str, empty: bool=False) -> None:
'Create a branch starting from the current branch.\n\n Parameters\n ----------\n new_branch_id : str\n New branch identifier.\n empty : bool\n Create an empty branch if true (no starting commit)\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._check_connection()
if empty:
source = {}
elif self.ref:
source = {'origin': f'{self.team}/{self.db}/{self.repo}/commit/{self.ref}'}
else:
source = {'origin': f'{self.team}/{self.db}/{self.repo}/branch/{self.branch}'}
_finish_response(requests.post(self._branch_url(new_branch_id), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=source, auth=self._auth())) |
def delete_branch(self, branch_id: str) -> None:
'Delete a branch\n\n Parameters\n ----------\n branch_id : str\n Branch to delete\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._check_connection()
_finish_response(requests.delete(self._branch_url(branch_id), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())) | 5,665,887,188,130,106,000 | Delete a branch
Parameters
----------
branch_id : str
Branch to delete
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | delete_branch | terminusdb/woql-client-p | python | def delete_branch(self, branch_id: str) -> None:
'Delete a branch\n\n Parameters\n ----------\n branch_id : str\n Branch to delete\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n '
self._check_connection()
_finish_response(requests.delete(self._branch_url(branch_id), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())) |
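A sketch of the two branch helpers above, under the same assumptions; the branch names are illustrative.

# Branch off the current branch, and create an orphan branch with no starting commit.
client.create_branch("feature-x")
client.create_branch("scratch", empty=True)

# Remove a branch that is no longer needed.
client.delete_branch("feature-x")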
def pull(self, remote: str='origin', remote_branch: Optional[str]=None, message: Optional[str]=None, author: Optional[str]=None) -> dict:
'Pull updates from a remote repository to the current database.\n\n Parameters\n ----------\n remote: str\n remote to pull from, default "origin"\n remote_branch: str, optional\n remote branch to pull from, default to be your current barnch\n message: str, optional\n optional commit message\n author: str, optional\n option to overide the author of the operation\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n dict\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.pull()\n '
self._check_connection()
if (remote_branch is None):
remote_branch = self.branch
if (author is None):
author = self.author
if (message is None):
message = f'Pulling from {remote}/{remote_branch} by Python client {__version__}'
rc_args = {'remote': remote, 'remote_branch': remote_branch, 'author': author, 'message': message}
result = requests.post(self._pull_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())
return json.loads(_finish_response(result)) | -717,244,658,174,841,200 | Pull updates from a remote repository to the current database.
Parameters
----------
remote: str
remote to pull from, default "origin"
remote_branch: str, optional
remote branch to pull from, defaults to your current branch
message: str, optional
optional commit message
author: str, optional
option to override the author of the operation
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
dict
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.pull() | terminusdb_client/woqlclient/woqlClient.py | pull | terminusdb/woql-client-p | python | def pull(self, remote: str='origin', remote_branch: Optional[str]=None, message: Optional[str]=None, author: Optional[str]=None) -> dict:
'Pull updates from a remote repository to the current database.\n\n Parameters\n ----------\n remote: str\n remote to pull from, default "origin"\n remote_branch: str, optional\n remote branch to pull from, default to be your current barnch\n message: str, optional\n optional commit message\n author: str, optional\n option to overide the author of the operation\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n dict\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.pull()\n '
self._check_connection()
if (remote_branch is None):
remote_branch = self.branch
if (author is None):
author = self.author
if (message is None):
message = f'Pulling from {remote}/{remote_branch} by Python client {__version__}'
rc_args = {'remote': remote, 'remote_branch': remote_branch, 'author': author, 'message': message}
result = requests.post(self._pull_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())
return json.loads(_finish_response(result)) |
def fetch(self, remote_id: str) -> dict:
'Fatch the brach from a remote\n\n Parameters\n ----------\n remote_id: str\n id of the remote\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database'
self._check_connection()
result = requests.post(self._fetch_url(remote_id), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())
return json.loads(_finish_response(result)) | -7,423,922,888,655,255,000 | Fetch the branch from a remote
Parameters
----------
remote_id: str
id of the remote
Raises
------
InterfaceError
if the client does not connect to a database | terminusdb_client/woqlclient/woqlClient.py | fetch | terminusdb/woql-client-p | python | def fetch(self, remote_id: str) -> dict:
'Fatch the brach from a remote\n\n Parameters\n ----------\n remote_id: str\n id of the remote\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database'
self._check_connection()
result = requests.post(self._fetch_url(remote_id), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())
return json.loads(_finish_response(result)) |
def push(self, remote: str='origin', remote_branch: Optional[str]=None, message: Optional[str]=None, author: Optional[str]=None) -> dict:
'Push changes from a branch to a remote repo\n\n Parameters\n ----------\n remote: str\n remote to push to, default "origin"\n remote_branch: str, optional\n remote branch to push to, default to be your current barnch\n message: str, optional\n optional commit message\n author: str, optional\n option to overide the author of the operation\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Examples\n -------\n >>> WOQLClient(server="http://localhost:6363").push(remote="origin", remote_branch = "main", author = "admin", message = "commit message"})\n\n Returns\n -------\n dict\n '
self._check_connection()
if (remote_branch is None):
remote_branch = self.branch
if (author is None):
author = self._author
if (message is None):
message = f'Pushing to {remote}/{remote_branch} by Python client {__version__}'
rc_args = {'remote': remote, 'remote_branch': remote_branch, 'author': author, 'message': message}
result = requests.post(self._push_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())
return json.loads(_finish_response(result)) | -6,826,902,822,942,968,000 | Push changes from a branch to a remote repo
Parameters
----------
remote: str
remote to push to, default "origin"
remote_branch: str, optional
remote branch to push to; defaults to your current branch
message: str, optional
optional commit message
author: str, optional
option to override the author of the operation
Raises
------
InterfaceError
if the client does not connect to a database
Examples
-------
>>> WOQLClient(server="http://localhost:6363").push(remote="origin", remote_branch = "main", author = "admin", message = "commit message")
Returns
-------
dict | terminusdb_client/woqlclient/woqlClient.py | push | terminusdb/woql-client-p | python | def push(self, remote: str='origin', remote_branch: Optional[str]=None, message: Optional[str]=None, author: Optional[str]=None) -> dict:
'Push changes from a branch to a remote repo\n\n Parameters\n ----------\n remote: str\n remote to push to, default "origin"\n remote_branch: str, optional\n remote branch to push to, default to be your current barnch\n message: str, optional\n optional commit message\n author: str, optional\n option to overide the author of the operation\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Examples\n -------\n >>> WOQLClient(server="http://localhost:6363").push(remote="origin", remote_branch = "main", author = "admin", message = "commit message"})\n\n Returns\n -------\n dict\n '
self._check_connection()
if (remote_branch is None):
remote_branch = self.branch
if (author is None):
author = self._author
if (message is None):
message = f'Pushing to {remote}/{remote_branch} by Python client {__version__}'
rc_args = {'remote': remote, 'remote_branch': remote_branch, 'author': author, 'message': message}
result = requests.post(self._push_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())
return json.loads(_finish_response(result)) |
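The mirror-image push call, again only a sketch; it reuses the connected client from the pull sketch above, and the branch name, author, and message are illustrative overrides of the client defaults.
>>> client.push(remote="origin", remote_branch="main", author="admin", message="Publish local commits")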
def rebase(self, branch: Optional[str]=None, commit: Optional[str]=None, rebase_source: Optional[str]=None, message: Optional[str]=None, author: Optional[str]=None) -> dict:
'Rebase the current branch onto the specified remote branch. Need to specify one of \'branch\',\'commit\' or the \'rebase_source\'.\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n branch : str, optional\n the branch for the rebase\n rebase_source : str, optional\n the source branch for the rebase\n message : str, optional\n the commit message\n author : str, optional\n the commit author\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n dict\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.rebase("the_branch")\n '
self._check_connection()
if ((branch is not None) and (commit is None)):
rebase_source = '/'.join([self.team, self.db, self.repo, 'branch', branch])
elif ((branch is None) and (commit is not None)):
rebase_source = '/'.join([self.team, self.db, self.repo, 'commit', commit])
elif ((branch is not None) or (commit is not None)):
raise RuntimeError('Cannot specify both branch and commit.')
elif (rebase_source is None):
raise RuntimeError("Need to specify one of 'branch', 'commit' or the 'rebase_source'")
if (author is None):
author = self._author
if (message is None):
message = f'Rebase from {rebase_source} by Python client {__version__}'
rc_args = {'rebase_from': rebase_source, 'author': author, 'message': message}
result = requests.post(self._rebase_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())
return json.loads(_finish_response(result)) | -2,819,286,272,856,279,600 | Rebase the current branch onto the specified remote branch. Need to specify one of 'branch','commit' or the 'rebase_source'.
Notes
-----
The "remote" repo can live in the local database.
Parameters
----------
branch : str, optional
the branch for the rebase
rebase_source : str, optional
the source branch for the rebase
message : str, optional
the commit message
author : str, optional
the commit author
Raises
------
InterfaceError
if the client does not connect to a database
Returns
-------
dict
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.rebase("the_branch") | terminusdb_client/woqlclient/woqlClient.py | rebase | terminusdb/woql-client-p | python | def rebase(self, branch: Optional[str]=None, commit: Optional[str]=None, rebase_source: Optional[str]=None, message: Optional[str]=None, author: Optional[str]=None) -> dict:
'Rebase the current branch onto the specified remote branch. Need to specify one of \'branch\',\'commit\' or the \'rebase_source\'.\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n branch : str, optional\n the branch for the rebase\n rebase_source : str, optional\n the source branch for the rebase\n message : str, optional\n the commit message\n author : str, optional\n the commit author\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Returns\n -------\n dict\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.rebase("the_branch")\n '
self._check_connection()
if ((branch is not None) and (commit is None)):
rebase_source = '/'.join([self.team, self.db, self.repo, 'branch', branch])
elif ((branch is None) and (commit is not None)):
rebase_source = '/'.join([self.team, self.db, self.repo, 'commit', commit])
elif ((branch is not None) or (commit is not None)):
raise RuntimeError('Cannot specify both branch and commit.')
elif (rebase_source is None):
raise RuntimeError("Need to specify one of 'branch', 'commit' or the 'rebase_source'")
if (author is None):
author = self._author
if (message is None):
message = f'Rebase from {rebase_source} by Python client {__version__}'
rc_args = {'rebase_from': rebase_source, 'author': author, 'message': message}
result = requests.post(self._rebase_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())
return json.loads(_finish_response(result)) |
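A minimal rebase sketch, reusing the same connected client; the branch name other_branch is hypothetical, and exactly one of branch, commit, or rebase_source may be supplied, as the body above enforces.
>>> client.rebase(branch="other_branch", message="Rebase onto other_branch")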
def reset(self, commit: Optional[str]=None, soft: bool=False, use_path: bool=False) -> None:
'Reset the current branch HEAD to the specified commit path. If `soft` is not True, it will be a hard reset, meaning reset to that commit in the backend and newer commit will be wipped out. If `soft` is True, the client will only reference to that commit and can be reset to the newest commit when done.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n commit: string\n Commit id or path to the commit (if use_path is True), for instance \'234980523ffaf93\' or \'admin/database/local/commit/234980523ffaf93\'. If not provided, it will reset to the newest commit (useful when need to go back after a soft reset).\n soft: bool\n Flag indicating if the reset if soft, that is referencing to a previous commit instead of resetting to a previous commit in the backend and wipping newer commits.\n use_path : bool\n Wheather or not the commit given is an id or path. Default using id and use_path is False.\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.reset(\'234980523ffaf93\')\n >>> client.reset(\'admin/database/local/commit/234980523ffaf93\', use_path=True)\n '
self._check_connection()
if soft:
if use_path:
self._ref = commit.split('/')[(- 1)]
else:
self._ref = commit
return None
else:
self._ref = None
if (commit is None):
return None
if use_path:
commit_path = commit
else:
commit_path = f'{self.team}/{self.db}/{self.repo}/commit/{commit}'
_finish_response(requests.post(self._reset_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json={'commit_descriptor': commit_path}, auth=self._auth())) | -2,691,294,074,058,396,000 | Reset the current branch HEAD to the specified commit path. If `soft` is not True, it will be a hard reset, meaning reset to that commit in the backend and newer commits will be wiped out. If `soft` is True, the client will only reference that commit and can be reset to the newest commit when done.
Raises
------
InterfaceError
if the client does not connect to a database
Notes
-----
The "remote" repo can live in the local database.
Parameters
----------
commit: string
Commit id or path to the commit (if use_path is True), for instance '234980523ffaf93' or 'admin/database/local/commit/234980523ffaf93'. If not provided, it will reset to the newest commit (useful when you need to go back after a soft reset).
soft: bool
Flag indicating if the reset is soft, that is, referencing a previous commit instead of resetting to a previous commit in the backend and wiping newer commits.
use_path : bool
Whether the commit given is an id or a path. By default an id is expected and use_path is False.
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.reset('234980523ffaf93')
>>> client.reset('admin/database/local/commit/234980523ffaf93', use_path=True) | terminusdb_client/woqlclient/woqlClient.py | reset | terminusdb/woql-client-p | python | def reset(self, commit: Optional[str]=None, soft: bool=False, use_path: bool=False) -> None:
'Reset the current branch HEAD to the specified commit path. If `soft` is not True, it will be a hard reset, meaning reset to that commit in the backend and newer commit will be wipped out. If `soft` is True, the client will only reference to that commit and can be reset to the newest commit when done.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n commit: string\n Commit id or path to the commit (if use_path is True), for instance \'234980523ffaf93\' or \'admin/database/local/commit/234980523ffaf93\'. If not provided, it will reset to the newest commit (useful when need to go back after a soft reset).\n soft: bool\n Flag indicating if the reset if soft, that is referencing to a previous commit instead of resetting to a previous commit in the backend and wipping newer commits.\n use_path : bool\n Wheather or not the commit given is an id or path. Default using id and use_path is False.\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.reset(\'234980523ffaf93\')\n >>> client.reset(\'admin/database/local/commit/234980523ffaf93\', use_path=True)\n '
self._check_connection()
if soft:
if use_path:
self._ref = commit.split('/')[(- 1)]
else:
self._ref = commit
return None
else:
self._ref = None
if (commit is None):
return None
if use_path:
commit_path = commit
else:
commit_path = f'{self.team}/{self.db}/{self.repo}/commit/{commit}'
_finish_response(requests.post(self._reset_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json={'commit_descriptor': commit_path}, auth=self._auth())) |
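The soft/hard distinction can be shown in a short sketch; the commit id is the placeholder used in the docstring, not a real commit.
>>> client.reset('234980523ffaf93', soft=True)   # only moves the client's internal reference
>>> client.reset()                               # drops the soft reference, back to the newest commit
>>> client.reset('234980523ffaf93')              # hard reset: newer commits are wiped on the backend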
def optimize(self, path: str) -> None:
'Optimize the specified path.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n path : string\n Path to optimize, for instance admin/database/_meta for the repo graph.\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.optimize(\'admin/database\') # optimise database branch (here main)\n >>> client.optimize(\'admin/database/_meta\') # optimise the repository graph (actually creates a squashed flat layer)\n >>> client.optimize(\'admin/database/local/_commits\') # commit graph is optimised\n '
self._check_connection()
_finish_response(requests.post(self._optimize_url(path), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())) | -5,317,494,528,360,595,000 | Optimize the specified path.
Raises
------
InterfaceError
if the client does not connect to a database
Notes
-----
The "remote" repo can live in the local database.
Parameters
----------
path : string
Path to optimize, for instance admin/database/_meta for the repo graph.
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.optimize('admin/database') # optimise database branch (here main)
>>> client.optimize('admin/database/_meta') # optimise the repository graph (actually creates a squashed flat layer)
>>> client.optimize('admin/database/local/_commits') # commit graph is optimised | terminusdb_client/woqlclient/woqlClient.py | optimize | terminusdb/woql-client-p | python | def optimize(self, path: str) -> None:
'Optimize the specified path.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n path : string\n Path to optimize, for instance admin/database/_meta for the repo graph.\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.optimize(\'admin/database\') # optimise database branch (here main)\n >>> client.optimize(\'admin/database/_meta\') # optimise the repository graph (actually creates a squashed flat layer)\n >>> client.optimize(\'admin/database/local/_commits\') # commit graph is optimised\n '
self._check_connection()
_finish_response(requests.post(self._optimize_url(path), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())) |
def squash(self, message: Optional[str]=None, author: Optional[str]=None, reset: bool=False) -> str:
'Squash the current branch HEAD into a commit\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n message : string\n Message for the newly created squash commit\n author : string\n Author of the commit\n reset : bool\n Perform reset after squash\n\n Returns\n -------\n str\n commit id to be reset\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.connect(user="admin", key="root", team="admin", db="some_db")\n >>> client.squash(\'This is a squash commit message!\')\n '
self._check_connection()
result = requests.post(self._squash_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json={'commit_info': self._generate_commit(message, author)}, auth=self._auth())
commit_id = json.loads(_finish_response(result)).get('api:commit')
if reset:
self.reset(commit_id)
return commit_id | 2,707,212,334,958,123,000 | Squash the current branch HEAD into a commit
Raises
------
InterfaceError
if the client does not connect to a database
Notes
-----
The "remote" repo can live in the local database.
Parameters
----------
message : string
Message for the newly created squash commit
author : string
Author of the commit
reset : bool
Perform reset after squash
Returns
-------
str
commit id to be reset
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.connect(user="admin", key="root", team="admin", db="some_db")
>>> client.squash('This is a squash commit message!') | terminusdb_client/woqlclient/woqlClient.py | squash | terminusdb/woql-client-p | python | def squash(self, message: Optional[str]=None, author: Optional[str]=None, reset: bool=False) -> str:
'Squash the current branch HEAD into a commit\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Notes\n -----\n The "remote" repo can live in the local database.\n\n Parameters\n ----------\n message : string\n Message for the newly created squash commit\n author : string\n Author of the commit\n reset : bool\n Perform reset after squash\n\n Returns\n -------\n str\n commit id to be reset\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.connect(user="admin", key="root", team="admin", db="some_db")\n >>> client.squash(\'This is a squash commit message!\')\n '
self._check_connection()
result = requests.post(self._squash_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json={'commit_info': self._generate_commit(message, author)}, auth=self._auth())
commit_id = json.loads(_finish_response(result)).get('api:commit')
if reset:
self.reset(commit_id)
return commit_id |
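A hedged usage sketch: squash returns the id of the new squashed commit, and reset=True immediately points the branch at it (the message text is illustrative).
>>> commit_id = client.squash('Squash exploratory commits', reset=True)
>>> # with reset=False the returned id can be passed to client.reset() later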
def diff(self, before: Union[(str, dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], after: Union[(str, dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], document_id: Union[(str, None)]=None):
'Perform diff on 2 set of document(s), result in a Patch object.\n\n Do not connect when using public API.\n\n Returns\n -------\n obj\n Patch object\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.connect(user="admin", key="root", team="admin", db="some_db")\n >>> result = client.diff({ "@id" : "Person/Jane", "@type" : "Person", "name" : "Jane"}, { "@id" : "Person/Jane", "@type" : "Person", "name" : "Janine"})\n >>> result.to_json = \'{ "name" : { "@op" : "SwapValue", "@before" : "Jane", "@after": "Janine" }}\''
request_dict = {}
for (key, item) in {'before': before, 'after': after}.items():
if isinstance(item, str):
request_dict[f'{key}_data_version'] = item
else:
request_dict[key] = self._convert_diff_dcoument(item)
if (document_id is not None):
if ('before_data_version' in request_dict):
if (document_id[:len('terminusdb:///data')] == 'terminusdb:///data'):
request_dict['document_id'] = document_id
else:
raise ValueError(f'Valid document id starts with `terminusdb:///data`, but got {document_id}')
else:
raise ValueError('`document_id` can only be used in conjusction with a data version or commit ID as `before`, not a document object')
if self._connected:
result = _finish_response(requests.post(self._diff_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict, auth=self._auth()))
else:
result = _finish_response(requests.post(self.server_url, headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict))
return Patch(json=result) | -3,945,360,807,689,457,000 | Perform a diff on two sets of documents, resulting in a Patch object.
Connecting is not required when using the public API.
Returns
-------
obj
Patch object
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.connect(user="admin", key="root", team="admin", db="some_db")
>>> result = client.diff({ "@id" : "Person/Jane", "@type" : "Person", "name" : "Jane"}, { "@id" : "Person/Jane", "@type" : "Person", "name" : "Janine"})
>>> result.to_json = '{ "name" : { "@op" : "SwapValue", "@before" : "Jane", "@after": "Janine" }}' | terminusdb_client/woqlclient/woqlClient.py | diff | terminusdb/woql-client-p | python | def diff(self, before: Union[(str, dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], after: Union[(str, dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], document_id: Union[(str, None)]=None):
'Perform diff on 2 set of document(s), result in a Patch object.\n\n Do not connect when using public API.\n\n Returns\n -------\n obj\n Patch object\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.connect(user="admin", key="root", team="admin", db="some_db")\n >>> result = client.diff({ "@id" : "Person/Jane", "@type" : "Person", "name" : "Jane"}, { "@id" : "Person/Jane", "@type" : "Person", "name" : "Janine"})\n >>> result.to_json = \'{ "name" : { "@op" : "SwapValue", "@before" : "Jane", "@after": "Janine" }}\
request_dict = {}
for (key, item) in {'before': before, 'after': after}.items():
if isinstance(item, str):
request_dict[f'{key}_data_version'] = item
else:
request_dict[key] = self._convert_diff_dcoument(item)
if (document_id is not None):
if ('before_data_version' in request_dict):
if (document_id[:len('terminusdb:///data')] == 'terminusdb:///data'):
request_dict['document_id'] = document_id
else:
raise ValueError(f'Valid document id starts with `terminusdb:///data`, but got {document_id}')
else:
raise ValueError('`document_id` can only be used in conjusction with a data version or commit ID as `before`, not a document object')
if self._connected:
result = _finish_response(requests.post(self._diff_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict, auth=self._auth()))
else:
result = _finish_response(requests.post(self.server_url, headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict))
return Patch(json=result) |
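Restating the docstring's example as a runnable sketch with the same illustrative Person documents; a connected client is not required, since an unconnected client posts to the public endpoint.
>>> before = {"@id": "Person/Jane", "@type": "Person", "name": "Jane"}
>>> after = {"@id": "Person/Jane", "@type": "Person", "name": "Janine"}
>>> patch_obj = client.diff(before, after)   # a Patch object describing the name change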
def patch(self, before: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], patch: Patch):
'Apply the patch object to the before object and return an after object. Note that this change does not commit changes to the graph.\n\n Do not connect when using public API.\n\n Returns\n -------\n dict\n After object\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.connect(user="admin", key="root", team="admin", db="some_db")\n >>> patch_obj = Patch(json=\'{"name" : { "@op" : "ValueSwap", "@before" : "Jane", "@after": "Janine" }}\')\n >>> result = client.patch({ "@id" : "Person/Jane", "@type" : Person", "name" : "Jane"}, patch_obj)\n >>> print(result)\n \'{ "@id" : "Person/Jane", "@type" : Person", "name" : "Janine"}\''
request_dict = {'before': self._convert_diff_dcoument(before), 'patch': patch.content}
if self._connected:
result = _finish_response(requests.post(self._patch_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict, auth=self._auth()))
else:
result = _finish_response(requests.post(self.server_url, headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict))
return json.loads(result) | -7,215,206,631,332,217,000 | Apply the patch object to the before object and return an after object. Note that this change does not commit changes to the graph.
Connecting is not required when using the public API.
Returns
-------
dict
After object
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.connect(user="admin", key="root", team="admin", db="some_db")
>>> patch_obj = Patch(json='{"name" : { "@op" : "ValueSwap", "@before" : "Jane", "@after": "Janine" }}')
>>> result = client.patch({ "@id" : "Person/Jane", "@type" : "Person", "name" : "Jane"}, patch_obj)
>>> print(result)
'{ "@id" : "Person/Jane", "@type" : Person", "name" : "Janine"}' | terminusdb_client/woqlclient/woqlClient.py | patch | terminusdb/woql-client-p | python | def patch(self, before: Union[(dict, List[dict], 'WOQLSchema', 'DocumentTemplate', List['DocumentTemplate'])], patch: Patch):
'Apply the patch object to the before object and return an after object. Note that this change does not commit changes to the graph.\n\n Do not connect when using public API.\n\n Returns\n -------\n dict\n After object\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.connect(user="admin", key="root", team="admin", db="some_db")\n >>> patch_obj = Patch(json=\'{"name" : { "@op" : "ValueSwap", "@before" : "Jane", "@after": "Janine" }}\')\n >>> result = client.patch({ "@id" : "Person/Jane", "@type" : Person", "name" : "Jane"}, patch_obj)\n >>> print(result)\n \'{ "@id" : "Person/Jane", "@type" : Person", "name" : "Janine"}\
request_dict = {'before': self._convert_diff_dcoument(before), 'patch': patch.content}
if self._connected:
result = _finish_response(requests.post(self._patch_url(), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict, auth=self._auth()))
else:
result = _finish_response(requests.post(self.server_url, headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=request_dict))
return json.loads(result) |
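diff and patch compose into a round trip; this sketch reuses the hypothetical Person documents from the diff example, and the printed result is only indicative since nothing is committed to the graph.
>>> patch_obj = client.diff(before, after)
>>> client.patch(before, patch_obj)
{'@id': 'Person/Jane', '@type': 'Person', 'name': 'Janine'}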
def clonedb(self, clone_source: str, newid: str, description: Optional[str]=None) -> None:
'Clone a remote repository and create a local copy.\n\n Parameters\n ----------\n clone_source : str\n The source url of the repo to be cloned.\n newid : str\n Identifier of the new repository to create.\n Description : str, optional\n Optional description about the cloned database.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.clonedb("http://terminusdb.com/some_user/test_db", "my_test_db")\n '
self._check_connection()
if (description is None):
description = f'New database {newid}'
rc_args = {'remote_url': clone_source, 'label': newid, 'comment': description}
_finish_response(requests.post(self._clone_url(newid), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())) | -6,368,264,291,951,515,000 | Clone a remote repository and create a local copy.
Parameters
----------
clone_source : str
The source url of the repo to be cloned.
newid : str
Identifier of the new repository to create.
description : str, optional
Optional description about the cloned database.
Raises
------
InterfaceError
if the client does not connect to a database
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client.clonedb("http://terminusdb.com/some_user/test_db", "my_test_db") | terminusdb_client/woqlclient/woqlClient.py | clonedb | terminusdb/woql-client-p | python | def clonedb(self, clone_source: str, newid: str, description: Optional[str]=None) -> None:
'Clone a remote repository and create a local copy.\n\n Parameters\n ----------\n clone_source : str\n The source url of the repo to be cloned.\n newid : str\n Identifier of the new repository to create.\n Description : str, optional\n Optional description about the cloned database.\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a database\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client.clonedb("http://terminusdb.com/some_user/test_db", "my_test_db")\n '
self._check_connection()
if (description is None):
description = f'New database {newid}'
rc_args = {'remote_url': clone_source, 'label': newid, 'comment': description}
_finish_response(requests.post(self._clone_url(newid), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, json=rc_args, auth=self._auth())) |
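The description argument defaults to 'New database <newid>'; a sketch passing it explicitly, using the illustrative source URL and ids from the docstring.
>>> client.clonedb("http://terminusdb.com/some_user/test_db", "my_test_db", description="Local copy for testing")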
def _generate_commit(self, msg: Optional[str]=None, author: Optional[str]=None) -> dict:
'Pack the specified commit info into a dict format expected by the server.\n\n Parameters\n ----------\n msg : str\n Commit message.\n author : str\n Commit author.\n\n Returns\n -------\n dict\n Formatted commit info.\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client._generate_commit("<message>", "<author>")\n {\'author\': \'<author>\', \'message\': \'<message>\'}\n '
if author:
mes_author = author
else:
mes_author = self._author
if (not msg):
msg = f'Commit via python client {__version__}'
return {'author': mes_author, 'message': msg} | -1,409,766,101,435,979,300 | Pack the specified commit info into a dict format expected by the server.
Parameters
----------
msg : str
Commit message.
author : str
Commit author.
Returns
-------
dict
Formatted commit info.
Examples
--------
>>> client = WOQLClient("https://127.0.0.1:6363/")
>>> client._generate_commit("<message>", "<author>")
{'author': '<author>', 'message': '<message>'} | terminusdb_client/woqlclient/woqlClient.py | _generate_commit | terminusdb/woql-client-p | python | def _generate_commit(self, msg: Optional[str]=None, author: Optional[str]=None) -> dict:
'Pack the specified commit info into a dict format expected by the server.\n\n Parameters\n ----------\n msg : str\n Commit message.\n author : str\n Commit author.\n\n Returns\n -------\n dict\n Formatted commit info.\n\n Examples\n --------\n >>> client = WOQLClient("https://127.0.0.1:6363/")\n >>> client._generate_commit("<message>", "<author>")\n {\'author\': \'<author>\', \'message\': \'<message>\'}\n '
if author:
mes_author = author
else:
mes_author = self._author
if (not msg):
msg = f'Commit via python client {__version__}'
return {'author': mes_author, 'message': msg} |
def get_database(self, dbid: str) -> Optional[dict]:
'\n Returns metadata (id, organization, label, comment) about the requested database\n Parameters\n ----------\n dbid : str\n The id of the database\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a server\n\n Returns\n -------\n dict or None if not found\n '
self._check_connection(check_db=False)
for this_db in self.get_databases():
if (this_db['name'] == dbid):
return this_db
return None | 579,466,602,066,837,800 | Returns metadata (id, organization, label, comment) about the requested database
Parameters
----------
dbid : str
The id of the database
Raises
------
InterfaceError
if the client does not connect to a server
Returns
-------
dict or None if not found | terminusdb_client/woqlclient/woqlClient.py | get_database | terminusdb/woql-client-p | python | def get_database(self, dbid: str) -> Optional[dict]:
'\n Returns metadata (id, organization, label, comment) about the requested database\n Parameters\n ----------\n dbid : str\n The id of the database\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a server\n\n Returns\n -------\n dict or None if not found\n '
self._check_connection(check_db=False)
for this_db in self.get_databases():
if (this_db['name'] == dbid):
return this_db
return None |
def get_databases(self) -> List[dict]:
'\n Returns a list of database metadata records for all databases the user has access to\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a server\n\n Returns\n -------\n list of dicts\n '
self._check_connection(check_db=False)
result = requests.get((self.api + '/'), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())
return json.loads(_finish_response(result)) | -8,478,958,528,453,672,000 | Returns a list of database metadata records for all databases the user has access to
Raises
------
InterfaceError
if the client does not connect to a server
Returns
-------
list of dicts | terminusdb_client/woqlclient/woqlClient.py | get_databases | terminusdb/woql-client-p | python | def get_databases(self) -> List[dict]:
'\n Returns a list of database metadata records for all databases the user has access to\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a server\n\n Returns\n -------\n list of dicts\n '
self._check_connection(check_db=False)
result = requests.get((self.api + '/'), headers={'user-agent': f'terminusdb-client-python/{__version__}'}, auth=self._auth())
return json.loads(_finish_response(result)) |
def list_databases(self) -> List[Dict]:
'\n Returns a list of database ids for all databases the user has access to\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a server\n\n Returns\n -------\n list of dicts\n '
self._check_connection(check_db=False)
all_dbs = []
for data in self.get_databases():
all_dbs.append(data['name'])
return all_dbs | 7,350,422,752,582,065,000 | Returns a list of database ids for all databases the user has access to
Raises
------
InterfaceError
if the client does not connect to a server
Returns
-------
list of dicts | terminusdb_client/woqlclient/woqlClient.py | list_databases | terminusdb/woql-client-p | python | def list_databases(self) -> List[Dict]:
'\n Returns a list of database ids for all databases the user has access to\n\n Raises\n ------\n InterfaceError\n if the client does not connect to a server\n\n Returns\n -------\n list of dicts\n '
self._check_connection(check_db=False)
all_dbs = []
for data in self.get_databases():
all_dbs.append(data['name'])
return all_dbs |
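The three database helpers differ only in how much metadata they return; a quick sketch, with hypothetical database names in the output.
>>> client.list_databases()          # bare database ids
['some_db', 'another_db']
>>> client.get_database('some_db')   # full metadata record, or None if it does not exist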
@since('2.3.0')
def getEpsilon(self):
'\n Gets the value of epsilon or its default value.\n '
return self.getOrDefault(self.epsilon) | -1,535,077,709,341,858,600 | Gets the value of epsilon or its default value. | python/pyspark/ml/regression.py | getEpsilon | AjithShetty2489/spark | python | @since('2.3.0')
def getEpsilon(self):
'\n \n '
return self.getOrDefault(self.epsilon) |
@keyword_only
def __init__(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-06, fitIntercept=True, standardization=True, solver='auto', weightCol=None, aggregationDepth=2, loss='squaredError', epsilon=1.35):
'\n __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, standardization=True, solver="auto", weightCol=None, aggregationDepth=2, loss="squaredError", epsilon=1.35)\n '
super(LinearRegression, self).__init__()
self._java_obj = self._new_java_obj('org.apache.spark.ml.regression.LinearRegression', self.uid)
self._setDefault(maxIter=100, regParam=0.0, tol=1e-06, loss='squaredError', epsilon=1.35)
kwargs = self._input_kwargs
self.setParams(**kwargs) | -2,032,851,235,059,459,800 | __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, standardization=True, solver="auto", weightCol=None, aggregationDepth=2, loss="squaredError", epsilon=1.35) | python/pyspark/ml/regression.py | __init__ | AjithShetty2489/spark | python | @keyword_only
def __init__(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-06, fitIntercept=True, standardization=True, solver='auto', weightCol=None, aggregationDepth=2, loss='squaredError', epsilon=1.35):
'\n \n '
super(LinearRegression, self).__init__()
self._java_obj = self._new_java_obj('org.apache.spark.ml.regression.LinearRegression', self.uid)
self._setDefault(maxIter=100, regParam=0.0, tol=1e-06, loss='squaredError', epsilon=1.35)
kwargs = self._input_kwargs
self.setParams(**kwargs) |
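A hedged construction sketch for the LinearRegression estimator documented above, using only parameters that appear in its signature; train_df is an assumed DataFrame with the usual "features" and "label" columns.
>>> from pyspark.ml.regression import LinearRegression
>>> lr = LinearRegression(maxIter=10, regParam=0.1, elasticNetParam=0.8)
>>> model = lr.fit(train_df)             # fit on the assumed training DataFrame
>>> model.coefficients, model.intercept  # fitted weights and bias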
@keyword_only
@since('1.4.0')
def setParams(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-06, fitIntercept=True, standardization=True, solver='auto', weightCol=None, aggregationDepth=2, loss='squaredError', epsilon=1.35):
'\n setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, standardization=True, solver="auto", weightCol=None, aggregationDepth=2, loss="squaredError", epsilon=1.35)\n Sets params for linear regression.\n '
kwargs = self._input_kwargs
return self._set(**kwargs) | 1,798,489,900,972,090,400 | setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, standardization=True, solver="auto", weightCol=None, aggregationDepth=2, loss="squaredError", epsilon=1.35)
Sets params for linear regression. | python/pyspark/ml/regression.py | setParams | AjithShetty2489/spark | python | @keyword_only
@since('1.4.0')
def setParams(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-06, fitIntercept=True, standardization=True, solver='auto', weightCol=None, aggregationDepth=2, loss='squaredError', epsilon=1.35):
'\n setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, standardization=True, solver="auto", weightCol=None, aggregationDepth=2, loss="squaredError", epsilon=1.35)\n Sets params for linear regression.\n '
kwargs = self._input_kwargs
return self._set(**kwargs) |
@since('2.3.0')
def setEpsilon(self, value):
'\n Sets the value of :py:attr:`epsilon`.\n '
return self._set(epsilon=value) | -6,138,312,146,789,055,000 | Sets the value of :py:attr:`epsilon`. | python/pyspark/ml/regression.py | setEpsilon | AjithShetty2489/spark | python | @since('2.3.0')
def setEpsilon(self, value):
'\n \n '
return self._set(epsilon=value) |
def setMaxIter(self, value):
'\n Sets the value of :py:attr:`maxIter`.\n '
return self._set(maxIter=value) | 1,288,476,920,971,597,800 | Sets the value of :py:attr:`maxIter`. | python/pyspark/ml/regression.py | setMaxIter | AjithShetty2489/spark | python | def setMaxIter(self, value):
'\n \n '
return self._set(maxIter=value) |
def setRegParam(self, value):
'\n Sets the value of :py:attr:`regParam`.\n '
return self._set(regParam=value) | -7,433,284,680,045,634,000 | Sets the value of :py:attr:`regParam`. | python/pyspark/ml/regression.py | setRegParam | AjithShetty2489/spark | python | def setRegParam(self, value):
'\n \n '
return self._set(regParam=value) |
def setTol(self, value):
'\n Sets the value of :py:attr:`tol`.\n '
return self._set(tol=value) | 3,579,479,604,354,210,000 | Sets the value of :py:attr:`tol`. | python/pyspark/ml/regression.py | setTol | AjithShetty2489/spark | python | def setTol(self, value):
'\n \n '
return self._set(tol=value) |
def setElasticNetParam(self, value):
'\n Sets the value of :py:attr:`elasticNetParam`.\n '
return self._set(elasticNetParam=value) | -3,621,938,483,410,980,000 | Sets the value of :py:attr:`elasticNetParam`. | python/pyspark/ml/regression.py | setElasticNetParam | AjithShetty2489/spark | python | def setElasticNetParam(self, value):
'\n \n '
return self._set(elasticNetParam=value) |
def setFitIntercept(self, value):
'\n Sets the value of :py:attr:`fitIntercept`.\n '
return self._set(fitIntercept=value) | -3,408,520,087,298,627,000 | Sets the value of :py:attr:`fitIntercept`. | python/pyspark/ml/regression.py | setFitIntercept | AjithShetty2489/spark | python | def setFitIntercept(self, value):
'\n \n '
return self._set(fitIntercept=value) |
def setStandardization(self, value):
'\n Sets the value of :py:attr:`standardization`.\n '
return self._set(standardization=value) | 2,077,692,268,013,256,400 | Sets the value of :py:attr:`standardization`. | python/pyspark/ml/regression.py | setStandardization | AjithShetty2489/spark | python | def setStandardization(self, value):
'\n \n '
return self._set(standardization=value) |
def setWeightCol(self, value):
'\n Sets the value of :py:attr:`weightCol`.\n '
return self._set(weightCol=value) | 7,126,166,856,151,208,000 | Sets the value of :py:attr:`weightCol`. | python/pyspark/ml/regression.py | setWeightCol | AjithShetty2489/spark | python | def setWeightCol(self, value):
'\n \n '
return self._set(weightCol=value) |
def setSolver(self, value):
'\n Sets the value of :py:attr:`solver`.\n '
return self._set(solver=value) | 659,948,514,240,389,100 | Sets the value of :py:attr:`solver`. | python/pyspark/ml/regression.py | setSolver | AjithShetty2489/spark | python | def setSolver(self, value):
'\n \n '
return self._set(solver=value) |
def setAggregationDepth(self, value):
'\n Sets the value of :py:attr:`aggregationDepth`.\n '
return self._set(aggregationDepth=value) | 5,049,484,725,658,685,000 | Sets the value of :py:attr:`aggregationDepth`. | python/pyspark/ml/regression.py | setAggregationDepth | AjithShetty2489/spark | python | def setAggregationDepth(self, value):
'\n \n '
return self._set(aggregationDepth=value) |
def setLoss(self, value):
'\n Sets the value of :py:attr:`loss`.\n '
return self._set(lossType=value) | 4,226,753,261,501,401,000 | Sets the value of :py:attr:`loss`. | python/pyspark/ml/regression.py | setLoss | AjithShetty2489/spark | python | def setLoss(self, value):
'\n \n '
return self._set(lossType=value) |
@property
@since('2.0.0')
def coefficients(self):
'\n Model coefficients.\n '
return self._call_java('coefficients') | 6,857,518,054,360,473,000 | Model coefficients. | python/pyspark/ml/regression.py | coefficients | AjithShetty2489/spark | python | @property
@since('2.0.0')
def coefficients(self):
'\n \n '
return self._call_java('coefficients') |
@property
@since('1.4.0')
def intercept(self):
'\n Model intercept.\n '
return self._call_java('intercept') | 5,739,049,948,923,467,000 | Model intercept. | python/pyspark/ml/regression.py | intercept | AjithShetty2489/spark | python | @property
@since('1.4.0')
def intercept(self):
'\n \n '
return self._call_java('intercept') |
@property
@since('2.3.0')
def scale(self):
'\n The value by which :math:`\\|y - X\'w\\|` is scaled down when loss is "huber", otherwise 1.0.\n '
return self._call_java('scale') | 3,241,283,480,831,272,400 | The value by which :math:`\|y - X'w\|` is scaled down when loss is "huber", otherwise 1.0. | python/pyspark/ml/regression.py | scale | AjithShetty2489/spark | python | @property
@since('2.3.0')
def scale(self):
'\n The value by which :math:`\\|y - X\'w\\|` is scaled down when loss is "huber", otherwise 1.0.\n '
return self._call_java('scale') |
@property
@since('2.0.0')
def summary(self):
'\n Gets summary (e.g. residuals, mse, r-squared ) of model on\n training set. An exception is thrown if\n `trainingSummary is None`.\n '
if self.hasSummary:
return LinearRegressionTrainingSummary(super(LinearRegressionModel, self).summary)
else:
raise RuntimeError(('No training summary available for this %s' % self.__class__.__name__)) | -8,882,501,893,985,365,000 | Gets summary (e.g. residuals, mse, r-squared ) of model on
training set. An exception is thrown if
`trainingSummary is None`. | python/pyspark/ml/regression.py | summary | AjithShetty2489/spark | python | @property
@since('2.0.0')
def summary(self):
'\n Gets summary (e.g. residuals, mse, r-squared ) of model on\n training set. An exception is thrown if\n `trainingSummary is None`.\n '
if self.hasSummary:
return LinearRegressionTrainingSummary(super(LinearRegressionModel, self).summary)
else:
raise RuntimeError(('No training summary available for this %s' % self.__class__.__name__)) |
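A sketch of reading the training summary off the fitted model from the earlier sketch; calling .summary on a model without one raises the RuntimeError shown above.
>>> summ = model.summary
>>> summ.rootMeanSquaredError, summ.r2   # training-set fit metrics
>>> summ.totalIterations                 # only meaningful for the l-bfgs solver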
@since('2.0.0')
def evaluate(self, dataset):
'\n Evaluates the model on a test dataset.\n\n :param dataset:\n Test dataset to evaluate model on, where dataset is an\n instance of :py:class:`pyspark.sql.DataFrame`\n '
if (not isinstance(dataset, DataFrame)):
raise ValueError(('dataset must be a DataFrame but got %s.' % type(dataset)))
java_lr_summary = self._call_java('evaluate', dataset)
return LinearRegressionSummary(java_lr_summary) | -4,808,242,261,066,155,000 | Evaluates the model on a test dataset.
:param dataset:
Test dataset to evaluate model on, where dataset is an
instance of :py:class:`pyspark.sql.DataFrame` | python/pyspark/ml/regression.py | evaluate | AjithShetty2489/spark | python | @since('2.0.0')
def evaluate(self, dataset):
'\n Evaluates the model on a test dataset.\n\n :param dataset:\n Test dataset to evaluate model on, where dataset is an\n instance of :py:class:`pyspark.sql.DataFrame`\n '
if (not isinstance(dataset, DataFrame)):
raise ValueError(('dataset must be a DataFrame but got %s.' % type(dataset)))
java_lr_summary = self._call_java('evaluate', dataset)
return LinearRegressionSummary(java_lr_summary) |
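evaluate produces the same kind of summary for held-out data; test_df is an assumed DataFrame with the same schema as the training data.
>>> test_summary = model.evaluate(test_df)
>>> test_summary.meanAbsoluteError, test_summary.meanSquaredError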
@property
@since('2.0.0')
def predictions(self):
"\n Dataframe outputted by the model's `transform` method.\n "
return self._call_java('predictions') | -8,863,001,023,905,391,000 | Dataframe outputted by the model's `transform` method. | python/pyspark/ml/regression.py | predictions | AjithShetty2489/spark | python | @property
@since('2.0.0')
def predictions(self):
"\n \n "
return self._call_java('predictions') |
@property
@since('2.0.0')
def predictionCol(self):
'\n Field in "predictions" which gives the predicted value of\n the label at each instance.\n '
return self._call_java('predictionCol') | 8,979,628,586,212,194,000 | Field in "predictions" which gives the predicted value of
the label at each instance. | python/pyspark/ml/regression.py | predictionCol | AjithShetty2489/spark | python | @property
@since('2.0.0')
def predictionCol(self):
'\n Field in "predictions" which gives the predicted value of\n the label at each instance.\n '
return self._call_java('predictionCol') |
@property
@since('2.0.0')
def labelCol(self):
'\n Field in "predictions" which gives the true label of each\n instance.\n '
return self._call_java('labelCol') | -6,334,475,539,055,536,000 | Field in "predictions" which gives the true label of each
instance. | python/pyspark/ml/regression.py | labelCol | AjithShetty2489/spark | python | @property
@since('2.0.0')
def labelCol(self):
'\n Field in "predictions" which gives the true label of each\n instance.\n '
return self._call_java('labelCol') |
@property
@since('2.0.0')
def featuresCol(self):
'\n Field in "predictions" which gives the features of each instance\n as a vector.\n '
return self._call_java('featuresCol') | 6,743,549,635,058,488,000 | Field in "predictions" which gives the features of each instance
as a vector. | python/pyspark/ml/regression.py | featuresCol | AjithShetty2489/spark | python | @property
@since('2.0.0')
def featuresCol(self):
'\n Field in "predictions" which gives the features of each instance\n as a vector.\n '
return self._call_java('featuresCol') |
@property
@since('2.0.0')
def explainedVariance(self):
'\n Returns the explained variance regression score.\n explainedVariance = :math:`1 - \\frac{variance(y - \\hat{y})}{variance(y)}`\n\n .. seealso:: `Wikipedia explain variation\n <http://en.wikipedia.org/wiki/Explained_variation>`_\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('explainedVariance') | -8,904,734,550,016,116,000 | Returns the explained variance regression score.
explainedVariance = :math:`1 - \frac{variance(y - \hat{y})}{variance(y)}`
.. seealso:: `Wikipedia explain variation
<http://en.wikipedia.org/wiki/Explained_variation>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions. | python/pyspark/ml/regression.py | explainedVariance | AjithShetty2489/spark | python | @property
@since('2.0.0')
def explainedVariance(self):
'\n Returns the explained variance regression score.\n explainedVariance = :math:`1 - \\frac{variance(y - \\hat{y})}{variance(y)}`\n\n .. seealso:: `Wikipedia explain variation\n <http://en.wikipedia.org/wiki/Explained_variation>`_\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('explainedVariance') |
@property
@since('2.0.0')
def meanAbsoluteError(self):
'\n Returns the mean absolute error, which is a risk function\n corresponding to the expected value of the absolute error\n loss or l1-norm loss.\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('meanAbsoluteError') | 7,154,150,193,848,016,000 | Returns the mean absolute error, which is a risk function
corresponding to the expected value of the absolute error
loss or l1-norm loss.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions. | python/pyspark/ml/regression.py | meanAbsoluteError | AjithShetty2489/spark | python | @property
@since('2.0.0')
def meanAbsoluteError(self):
'\n Returns the mean absolute error, which is a risk function\n corresponding to the expected value of the absolute error\n loss or l1-norm loss.\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('meanAbsoluteError') |
@property
@since('2.0.0')
def meanSquaredError(self):
'\n Returns the mean squared error, which is a risk function\n corresponding to the expected value of the squared error\n loss or quadratic loss.\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('meanSquaredError') | -4,137,376,390,146,737,700 | Returns the mean squared error, which is a risk function
corresponding to the expected value of the squared error
loss or quadratic loss.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions. | python/pyspark/ml/regression.py | meanSquaredError | AjithShetty2489/spark | python | @property
@since('2.0.0')
def meanSquaredError(self):
'\n Returns the mean squared error, which is a risk function\n corresponding to the expected value of the squared error\n loss or quadratic loss.\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('meanSquaredError') |
@property
@since('2.0.0')
def rootMeanSquaredError(self):
'\n Returns the root mean squared error, which is defined as the\n square root of the mean squared error.\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('rootMeanSquaredError') | -338,729,532,713,508,700 | Returns the root mean squared error, which is defined as the
square root of the mean squared error.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions. | python/pyspark/ml/regression.py | rootMeanSquaredError | AjithShetty2489/spark | python | @property
@since('2.0.0')
def rootMeanSquaredError(self):
'\n Returns the root mean squared error, which is defined as the\n square root of the mean squared error.\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('rootMeanSquaredError') |
@property
@since('2.0.0')
def r2(self):
'\n Returns R^2, the coefficient of determination.\n\n .. seealso:: `Wikipedia coefficient of determination\n <http://en.wikipedia.org/wiki/Coefficient_of_determination>`_\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('r2') | -6,180,160,953,391,052,000 | Returns R^2, the coefficient of determination.
.. seealso:: `Wikipedia coefficient of determination
<http://en.wikipedia.org/wiki/Coefficient_of_determination>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions. | python/pyspark/ml/regression.py | r2 | AjithShetty2489/spark | python | @property
@since('2.0.0')
def r2(self):
'\n Returns R^2, the coefficient of determination.\n\n .. seealso:: `Wikipedia coefficient of determination\n <http://en.wikipedia.org/wiki/Coefficient_of_determination>`_\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark\n versions.\n '
return self._call_java('r2') |
@property
@since('2.4.0')
def r2adj(self):
'\n Returns Adjusted R^2, the adjusted coefficient of determination.\n\n .. seealso:: `Wikipedia coefficient of determination, Adjusted R^2\n <https://en.wikipedia.org/wiki/Coefficient_of_determination#Adjusted_R2>`_\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark versions.\n '
return self._call_java('r2adj') | -2,022,324,800,128,418,300 | Returns Adjusted R^2, the adjusted coefficient of determination.
.. seealso:: `Wikipedia coefficient of determination, Adjusted R^2
<https://en.wikipedia.org/wiki/Coefficient_of_determination#Adjusted_R2>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark versions. | python/pyspark/ml/regression.py | r2adj | AjithShetty2489/spark | python | @property
@since('2.4.0')
def r2adj(self):
'\n Returns Adjusted R^2, the adjusted coefficient of determination.\n\n .. seealso:: `Wikipedia coefficient of determination, Adjusted R^2\n <https://en.wikipedia.org/wiki/Coefficient_of_determination#Adjusted_R2>`_\n\n .. note:: This ignores instance weights (setting all to 1.0) from\n `LinearRegression.weightCol`. This will change in later Spark versions.\n '
return self._call_java('r2adj') |
@property
@since('2.0.0')
def residuals(self):
'\n Residuals (label - predicted value)\n '
return self._call_java('residuals') | 6,089,670,078,438,460,000 | Residuals (label - predicted value) | python/pyspark/ml/regression.py | residuals | AjithShetty2489/spark | python | @property
@since('2.0.0')
def residuals(self):
'\n \n '
return self._call_java('residuals') |
@property
@since('2.0.0')
def numInstances(self):
'\n Number of instances in DataFrame predictions\n '
return self._call_java('numInstances') | 567,005,979,655,261,800 | Number of instances in DataFrame predictions | python/pyspark/ml/regression.py | numInstances | AjithShetty2489/spark | python | @property
@since('2.0.0')
def numInstances(self):
'\n \n '
return self._call_java('numInstances') |
@property
@since('2.2.0')
def degreesOfFreedom(self):
'\n Degrees of freedom.\n '
return self._call_java('degreesOfFreedom') | 8,608,220,457,733,950,000 | Degrees of freedom. | python/pyspark/ml/regression.py | degreesOfFreedom | AjithShetty2489/spark | python | @property
@since('2.2.0')
def degreesOfFreedom(self):
'\n \n '
return self._call_java('degreesOfFreedom') |
@property
@since('2.0.0')
def devianceResiduals(self):
'\n The weighted residuals, the usual residuals rescaled by the\n square root of the instance weights.\n '
return self._call_java('devianceResiduals') | -5,755,805,787,819,274,000 | The weighted residuals, the usual residuals rescaled by the
square root of the instance weights. | python/pyspark/ml/regression.py | devianceResiduals | AjithShetty2489/spark | python | @property
@since('2.0.0')
def devianceResiduals(self):
'\n The weighted residuals, the usual residuals rescaled by the\n square root of the instance weights.\n '
return self._call_java('devianceResiduals') |
@property
@since('2.0.0')
def coefficientStandardErrors(self):
'\n Standard error of estimated coefficients and intercept.\n This value is only available when using the "normal" solver.\n\n If :py:attr:`LinearRegression.fitIntercept` is set to True,\n then the last element returned corresponds to the intercept.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('coefficientStandardErrors') | -5,840,749,010,366,116,000 | Standard error of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver` | python/pyspark/ml/regression.py | coefficientStandardErrors | AjithShetty2489/spark | python | @property
@since('2.0.0')
def coefficientStandardErrors(self):
'\n Standard error of estimated coefficients and intercept.\n This value is only available when using the "normal" solver.\n\n If :py:attr:`LinearRegression.fitIntercept` is set to True,\n then the last element returned corresponds to the intercept.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('coefficientStandardErrors') |
@property
@since('2.0.0')
def tValues(self):
'\n T-statistic of estimated coefficients and intercept.\n This value is only available when using the "normal" solver.\n\n If :py:attr:`LinearRegression.fitIntercept` is set to True,\n then the last element returned corresponds to the intercept.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('tValues') | -4,420,046,533,744,205,000 | T-statistic of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver` | python/pyspark/ml/regression.py | tValues | AjithShetty2489/spark | python | @property
@since('2.0.0')
def tValues(self):
'\n T-statistic of estimated coefficients and intercept.\n This value is only available when using the "normal" solver.\n\n If :py:attr:`LinearRegression.fitIntercept` is set to True,\n then the last element returned corresponds to the intercept.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('tValues') |
@property
@since('2.0.0')
def pValues(self):
'\n Two-sided p-value of estimated coefficients and intercept.\n This value is only available when using the "normal" solver.\n\n If :py:attr:`LinearRegression.fitIntercept` is set to True,\n then the last element returned corresponds to the intercept.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('pValues') | -827,290,872,445,449,000 | Two-sided p-value of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver` | python/pyspark/ml/regression.py | pValues | AjithShetty2489/spark | python | @property
@since('2.0.0')
def pValues(self):
'\n Two-sided p-value of estimated coefficients and intercept.\n This value is only available when using the "normal" solver.\n\n If :py:attr:`LinearRegression.fitIntercept` is set to True,\n then the last element returned corresponds to the intercept.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('pValues') |
@property
@since('2.0.0')
def objectiveHistory(self):
'\n Objective function (scaled loss + regularization) at each\n iteration.\n This value is only available when using the "l-bfgs" solver.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('objectiveHistory') | -7,613,942,411,571,399,000 | Objective function (scaled loss + regularization) at each
iteration.
This value is only available when using the "l-bfgs" solver.
.. seealso:: :py:attr:`LinearRegression.solver` | python/pyspark/ml/regression.py | objectiveHistory | AjithShetty2489/spark | python | @property
@since('2.0.0')
def objectiveHistory(self):
'\n Objective function (scaled loss + regularization) at each\n iteration.\n This value is only available when using the "l-bfgs" solver.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('objectiveHistory') |
@property
@since('2.0.0')
def totalIterations(self):
'\n Number of training iterations until termination.\n This value is only available when using the "l-bfgs" solver.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('totalIterations') | -6,211,222,178,903,529,000 | Number of training iterations until termination.
This value is only available when using the "l-bfgs" solver.
.. seealso:: :py:attr:`LinearRegression.solver` | python/pyspark/ml/regression.py | totalIterations | AjithShetty2489/spark | python | @property
@since('2.0.0')
def totalIterations(self):
'\n Number of training iterations until termination.\n This value is only available when using the "l-bfgs" solver.\n\n .. seealso:: :py:attr:`LinearRegression.solver`\n '
return self._call_java('totalIterations') |
def getIsotonic(self):
'\n Gets the value of isotonic or its default value.\n '
return self.getOrDefault(self.isotonic) | 2,921,464,487,635,401,000 | Gets the value of isotonic or its default value. | python/pyspark/ml/regression.py | getIsotonic | AjithShetty2489/spark | python | def getIsotonic(self):
'\n \n '
return self.getOrDefault(self.isotonic) |
def getFeatureIndex(self):
'\n Gets the value of featureIndex or its default value.\n '
return self.getOrDefault(self.featureIndex) | -425,671,395,134,212,900 | Gets the value of featureIndex or its default value. | python/pyspark/ml/regression.py | getFeatureIndex | AjithShetty2489/spark | python | def getFeatureIndex(self):
'\n \n '
return self.getOrDefault(self.featureIndex) |
@keyword_only
def __init__(self, featuresCol='features', labelCol='label', predictionCol='prediction', weightCol=None, isotonic=True, featureIndex=0):
'\n __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", weightCol=None, isotonic=True, featureIndex=0):\n '
super(IsotonicRegression, self).__init__()
self._java_obj = self._new_java_obj('org.apache.spark.ml.regression.IsotonicRegression', self.uid)
self._setDefault(isotonic=True, featureIndex=0)
kwargs = self._input_kwargs
self.setParams(**kwargs) | -5,562,647,240,495,843,000 | __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", weightCol=None, isotonic=True, featureIndex=0): | python/pyspark/ml/regression.py | __init__ | AjithShetty2489/spark | python | @keyword_only
def __init__(self, featuresCol='features', labelCol='label', predictionCol='prediction', weightCol=None, isotonic=True, featureIndex=0):
'\n \n '
super(IsotonicRegression, self).__init__()
self._java_obj = self._new_java_obj('org.apache.spark.ml.regression.IsotonicRegression', self.uid)
self._setDefault(isotonic=True, featureIndex=0)
kwargs = self._input_kwargs
self.setParams(**kwargs) |
@keyword_only
def setParams(self, featuresCol='features', labelCol='label', predictionCol='prediction', weightCol=None, isotonic=True, featureIndex=0):
'\n setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", weightCol=None, isotonic=True, featureIndex=0):\n Set the params for IsotonicRegression.\n '
kwargs = self._input_kwargs
return self._set(**kwargs) | -4,398,090,866,832,613,000 | setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", weightCol=None, isotonic=True, featureIndex=0):
Set the params for IsotonicRegression. | python/pyspark/ml/regression.py | setParams | AjithShetty2489/spark | python | @keyword_only
def setParams(self, featuresCol='features', labelCol='label', predictionCol='prediction', weightCol=None, isotonic=True, featureIndex=0):
'\n setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", weightCol=None, isotonic=True, featureIndex=0):\n Set the params for IsotonicRegression.\n '
kwargs = self._input_kwargs
return self._set(**kwargs) |
def setIsotonic(self, value):
'\n Sets the value of :py:attr:`isotonic`.\n '
return self._set(isotonic=value) | 6,740,910,986,481,251,000 | Sets the value of :py:attr:`isotonic`. | python/pyspark/ml/regression.py | setIsotonic | AjithShetty2489/spark | python | def setIsotonic(self, value):
'\n \n '
return self._set(isotonic=value) |
def setFeatureIndex(self, value):
'\n Sets the value of :py:attr:`featureIndex`.\n '
return self._set(featureIndex=value) | 712,460,935,526,708,900 | Sets the value of :py:attr:`featureIndex`. | python/pyspark/ml/regression.py | setFeatureIndex | AjithShetty2489/spark | python | def setFeatureIndex(self, value):
'\n \n '
return self._set(featureIndex=value) |
@since('1.6.0')
def setFeaturesCol(self, value):
'\n Sets the value of :py:attr:`featuresCol`.\n '
return self._set(featuresCol=value) | 668,118,026,924,361,500 | Sets the value of :py:attr:`featuresCol`. | python/pyspark/ml/regression.py | setFeaturesCol | AjithShetty2489/spark | python | @since('1.6.0')
def setFeaturesCol(self, value):
'\n \n '
return self._set(featuresCol=value) |
@since('1.6.0')
def setPredictionCol(self, value):
'\n Sets the value of :py:attr:`predictionCol`.\n '
return self._set(predictionCol=value) | -5,502,708,142,385,355,000 | Sets the value of :py:attr:`predictionCol`. | python/pyspark/ml/regression.py | setPredictionCol | AjithShetty2489/spark | python | @since('1.6.0')
def setPredictionCol(self, value):
'\n \n '
return self._set(predictionCol=value) |
@since('1.6.0')
def setLabelCol(self, value):
'\n Sets the value of :py:attr:`labelCol`.\n '
return self._set(labelCol=value) | 5,234,744,185,482,678,000 | Sets the value of :py:attr:`labelCol`. | python/pyspark/ml/regression.py | setLabelCol | AjithShetty2489/spark | python | @since('1.6.0')
def setLabelCol(self, value):
'\n \n '
return self._set(labelCol=value) |
@since('1.6.0')
def setWeightCol(self, value):
'\n Sets the value of :py:attr:`weightCol`.\n '
return self._set(weightCol=value) | 111,680,165,829,194,820 | Sets the value of :py:attr:`weightCol`. | python/pyspark/ml/regression.py | setWeightCol | AjithShetty2489/spark | python | @since('1.6.0')
def setWeightCol(self, value):
'\n \n '
return self._set(weightCol=value) |
@since('3.0.0')
def setFeaturesCol(self, value):
'\n Sets the value of :py:attr:`featuresCol`.\n '
return self._set(featuresCol=value) | -4,158,365,003,254,860,000 | Sets the value of :py:attr:`featuresCol`. | python/pyspark/ml/regression.py | setFeaturesCol | AjithShetty2489/spark | python | @since('3.0.0')
def setFeaturesCol(self, value):
'\n \n '
return self._set(featuresCol=value) |
@since('3.0.0')
def setPredictionCol(self, value):
'\n Sets the value of :py:attr:`predictionCol`.\n '
return self._set(predictionCol=value) | 1,012,894,679,154,316,400 | Sets the value of :py:attr:`predictionCol`. | python/pyspark/ml/regression.py | setPredictionCol | AjithShetty2489/spark | python | @since('3.0.0')
def setPredictionCol(self, value):
'\n \n '
return self._set(predictionCol=value) |
def setFeatureIndex(self, value):
'\n Sets the value of :py:attr:`featureIndex`.\n '
return self._set(featureIndex=value) | 712,460,935,526,708,900 | Sets the value of :py:attr:`featureIndex`. | python/pyspark/ml/regression.py | setFeatureIndex | AjithShetty2489/spark | python | def setFeatureIndex(self, value):
'\n \n '
return self._set(featureIndex=value) |
@property
@since('1.6.0')
def boundaries(self):
'\n Boundaries in increasing order for which predictions are known.\n '
return self._call_java('boundaries') | 25,368,690,135,028,264 | Boundaries in increasing order for which predictions are known. | python/pyspark/ml/regression.py | boundaries | AjithShetty2489/spark | python | @property
@since('1.6.0')
def boundaries(self):
'\n \n '
return self._call_java('boundaries') |
@property
@since('1.6.0')
def predictions(self):
'\n Predictions associated with the boundaries at the same index, monotone because of isotonic\n regression.\n '
return self._call_java('predictions') | 583,706,518,878,989,700 | Predictions associated with the boundaries at the same index, monotone because of isotonic
regression. | python/pyspark/ml/regression.py | predictions | AjithShetty2489/spark | python | @property
@since('1.6.0')
def predictions(self):
'\n Predictions associated with the boundaries at the same index, monotone because of isotonic\n regression.\n '
return self._call_java('predictions') |
@since('3.0.0')
def numFeatures(self):
'\n Returns the number of features the model was trained on. If unknown, returns -1\n '
return self._call_java('numFeatures') | -481,038,317,992,713,100 | Returns the number of features the model was trained on. If unknown, returns -1 | python/pyspark/ml/regression.py | numFeatures | AjithShetty2489/spark | python | @since('3.0.0')
def numFeatures(self):
'\n \n '
return self._call_java('numFeatures') |
@since('3.0.0')
def predict(self, value):
'\n Predict label for the given features.\n '
return self._call_java('predict', value) | -3,504,815,097,445,081,000 | Predict label for the given features. | python/pyspark/ml/regression.py | predict | AjithShetty2489/spark | python | @since('3.0.0')
def predict(self, value):
'\n \n '
return self._call_java('predict', value) |
@keyword_only
def __init__(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity='variance', seed=None, varianceCol=None, weightCol=None, leafCol='', minWeightFractionPerNode=0.0):
'\n __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", seed=None, varianceCol=None, weightCol=None, leafCol="", minWeightFractionPerNode=0.0)\n '
super(DecisionTreeRegressor, self).__init__()
self._java_obj = self._new_java_obj('org.apache.spark.ml.regression.DecisionTreeRegressor', self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity='variance', leafCol='', minWeightFractionPerNode=0.0)
kwargs = self._input_kwargs
self.setParams(**kwargs) | 6,855,883,870,003,851,000 | __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", seed=None, varianceCol=None, weightCol=None, leafCol="", minWeightFractionPerNode=0.0) | python/pyspark/ml/regression.py | __init__ | AjithShetty2489/spark | python | @keyword_only
def __init__(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity='variance', seed=None, varianceCol=None, weightCol=None, leafCol=, minWeightFractionPerNode=0.0):
'\n \n '
super(DecisionTreeRegressor, self).__init__()
self._java_obj = self._new_java_obj('org.apache.spark.ml.regression.DecisionTreeRegressor', self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity='variance', leafCol=, minWeightFractionPerNode=0.0)
kwargs = self._input_kwargs
self.setParams(**kwargs) |
@keyword_only
@since('1.4.0')
def setParams(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity='variance', seed=None, varianceCol=None, weightCol=None, leafCol='', minWeightFractionPerNode=0.0):
'\n setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", seed=None, varianceCol=None, weightCol=None, leafCol="", minWeightFractionPerNode=0.0)\n Sets params for the DecisionTreeRegressor.\n '
kwargs = self._input_kwargs
return self._set(**kwargs) | -5,414,660,128,328,874,000 | setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", seed=None, varianceCol=None, weightCol=None, leafCol="", minWeightFractionPerNode=0.0)
Sets params for the DecisionTreeRegressor. | python/pyspark/ml/regression.py | setParams | AjithShetty2489/spark | python | @keyword_only
@since('1.4.0')
def setParams(self, featuresCol='features', labelCol='label', predictionCol='prediction', maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity='variance', seed=None, varianceCol=None, weightCol=None, leafCol=, minWeightFractionPerNode=0.0):
'\n setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance", seed=None, varianceCol=None, weightCol=None, leafCol=, minWeightFractionPerNode=0.0)\n Sets params for the DecisionTreeRegressor.\n '
kwargs = self._input_kwargs
return self._set(**kwargs) |
@since('1.4.0')
def setMaxDepth(self, value):
'\n Sets the value of :py:attr:`maxDepth`.\n '
return self._set(maxDepth=value) | 6,900,103,097,222,239,000 | Sets the value of :py:attr:`maxDepth`. | python/pyspark/ml/regression.py | setMaxDepth | AjithShetty2489/spark | python | @since('1.4.0')
def setMaxDepth(self, value):
'\n \n '
return self._set(maxDepth=value) |
@since('1.4.0')
def setMaxBins(self, value):
'\n Sets the value of :py:attr:`maxBins`.\n '
return self._set(maxBins=value) | -7,662,286,719,655,598,000 | Sets the value of :py:attr:`maxBins`. | python/pyspark/ml/regression.py | setMaxBins | AjithShetty2489/spark | python | @since('1.4.0')
def setMaxBins(self, value):
'\n \n '
return self._set(maxBins=value) |
@since('1.4.0')
def setMinInstancesPerNode(self, value):
'\n Sets the value of :py:attr:`minInstancesPerNode`.\n '
return self._set(minInstancesPerNode=value) | -8,071,360,048,086,069,000 | Sets the value of :py:attr:`minInstancesPerNode`. | python/pyspark/ml/regression.py | setMinInstancesPerNode | AjithShetty2489/spark | python | @since('1.4.0')
def setMinInstancesPerNode(self, value):
'\n \n '
return self._set(minInstancesPerNode=value) |
@since('3.0.0')
def setMinWeightFractionPerNode(self, value):
'\n Sets the value of :py:attr:`minWeightFractionPerNode`.\n '
return self._set(minWeightFractionPerNode=value) | 5,709,196,588,527,269,000 | Sets the value of :py:attr:`minWeightFractionPerNode`. | python/pyspark/ml/regression.py | setMinWeightFractionPerNode | AjithShetty2489/spark | python | @since('3.0.0')
def setMinWeightFractionPerNode(self, value):
'\n \n '
return self._set(minWeightFractionPerNode=value) |
@since('1.4.0')
def setMinInfoGain(self, value):
'\n Sets the value of :py:attr:`minInfoGain`.\n '
return self._set(minInfoGain=value) | -2,189,222,529,958,267,000 | Sets the value of :py:attr:`minInfoGain`. | python/pyspark/ml/regression.py | setMinInfoGain | AjithShetty2489/spark | python | @since('1.4.0')
def setMinInfoGain(self, value):
'\n \n '
return self._set(minInfoGain=value) |
@since('1.4.0')
def setMaxMemoryInMB(self, value):
'\n Sets the value of :py:attr:`maxMemoryInMB`.\n '
return self._set(maxMemoryInMB=value) | -7,743,889,602,156,593,000 | Sets the value of :py:attr:`maxMemoryInMB`. | python/pyspark/ml/regression.py | setMaxMemoryInMB | AjithShetty2489/spark | python | @since('1.4.0')
def setMaxMemoryInMB(self, value):
'\n \n '
return self._set(maxMemoryInMB=value) |
@since('1.4.0')
def setCacheNodeIds(self, value):
'\n Sets the value of :py:attr:`cacheNodeIds`.\n '
return self._set(cacheNodeIds=value) | -7,957,309,380,185,966,000 | Sets the value of :py:attr:`cacheNodeIds`. | python/pyspark/ml/regression.py | setCacheNodeIds | AjithShetty2489/spark | python | @since('1.4.0')
def setCacheNodeIds(self, value):
'\n \n '
return self._set(cacheNodeIds=value) |
@since('1.4.0')
def setImpurity(self, value):
'\n Sets the value of :py:attr:`impurity`.\n '
return self._set(impurity=value) | 5,925,454,725,552,672,000 | Sets the value of :py:attr:`impurity`. | python/pyspark/ml/regression.py | setImpurity | AjithShetty2489/spark | python | @since('1.4.0')
def setImpurity(self, value):
'\n \n '
return self._set(impurity=value) |
@since('1.4.0')
def setCheckpointInterval(self, value):
'\n Sets the value of :py:attr:`checkpointInterval`.\n '
return self._set(checkpointInterval=value) | -7,454,580,376,492,684,000 | Sets the value of :py:attr:`checkpointInterval`. | python/pyspark/ml/regression.py | setCheckpointInterval | AjithShetty2489/spark | python | @since('1.4.0')
def setCheckpointInterval(self, value):
'\n \n '
return self._set(checkpointInterval=value) |
def setSeed(self, value):
'\n Sets the value of :py:attr:`seed`.\n '
return self._set(seed=value) | -88,293,150,966,480,180 | Sets the value of :py:attr:`seed`. | python/pyspark/ml/regression.py | setSeed | AjithShetty2489/spark | python | def setSeed(self, value):
'\n \n '
return self._set(seed=value) |
@since('3.0.0')
def setWeightCol(self, value):
'\n Sets the value of :py:attr:`weightCol`.\n '
return self._set(weightCol=value) | 3,791,292,180,445,544,000 | Sets the value of :py:attr:`weightCol`. | python/pyspark/ml/regression.py | setWeightCol | AjithShetty2489/spark | python | @since('3.0.0')
def setWeightCol(self, value):
'\n \n '
return self._set(weightCol=value) |