body
stringlengths 26
98.2k
| body_hash
int64 -9,222,864,604,528,158,000
9,221,803,474B
| docstring
stringlengths 1
16.8k
| path
stringlengths 5
230
| name
stringlengths 1
96
| repository_name
stringlengths 7
89
| lang
stringclasses 1
value | body_without_docstring
stringlengths 20
98.2k
|
---|---|---|---|---|---|---|---|
def AcidocellaSpMxAz02(directed: bool=False, preprocess: bool=True, load_nodes: bool=True, verbose: int=2, cache: bool=True, cache_path: str='graphs/string', version: str='links.v11.5', **additional_graph_kwargs: Dict) -> Graph:
'Return new instance of the Acidocella sp. MX-AZ02 graph.\n\n The graph is automatically retrieved from the STRING repository.\t\n\n Parameters\n -------------------\n directed: bool = False\n Wether to load the graph as directed or undirected.\n By default false.\n preprocess: bool = True\n Whether to preprocess the graph to be loaded in \n optimal time and memory.\n load_nodes: bool = True,\n Whether to load the nodes vocabulary or treat the nodes\n simply as a numeric range.\n verbose: int = 2,\n Wether to show loading bars during the retrieval and building\n of the graph.\n cache: bool = True\n Whether to use cache, i.e. download files only once\n and preprocess them only once.\n cache_path: str = "graphs"\n Where to store the downloaded graphs.\n version: str = "links.v11.5"\n The version of the graph to retrieve.\t\t\n\tThe available versions are:\n\t\t\t- homology.v11.5\n\t\t\t- physical.links.v11.5\n\t\t\t- links.v11.5\n additional_graph_kwargs: Dict\n Additional graph kwargs.\n\n Returns\n -----------------------\n Instace of Acidocella sp. MX-AZ02 graph.\n\n\tReferences\n\t---------------------\n\tPlease cite the following if you use the data:\n\t\n\t```bib\n\t@article{szklarczyk2019string,\n\t title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},\n\t author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},\n\t journal={Nucleic acids research},\n\t volume={47},\n\t number={D1},\n\t pages={D607--D613},\n\t year={2019},\n\t publisher={Oxford University Press}\n\t}\n\t```\n '
return AutomaticallyRetrievedGraph(graph_name='AcidocellaSpMxAz02', repository='string', version=version, directed=directed, preprocess=preprocess, load_nodes=load_nodes, verbose=verbose, cache=cache, cache_path=cache_path, additional_graph_kwargs=additional_graph_kwargs)() | -4,163,455,853,893,054,500 | Return new instance of the Acidocella sp. MX-AZ02 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
Wether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
load_nodes: bool = True,
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
verbose: int = 2,
Wether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.5
- physical.links.v11.5
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
Instace of Acidocella sp. MX-AZ02 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
``` | bindings/python/ensmallen/datasets/string/acidocellaspmxaz02.py | AcidocellaSpMxAz02 | AnacletoLAB/ensmallen | python | def AcidocellaSpMxAz02(directed: bool=False, preprocess: bool=True, load_nodes: bool=True, verbose: int=2, cache: bool=True, cache_path: str='graphs/string', version: str='links.v11.5', **additional_graph_kwargs: Dict) -> Graph:
'Return new instance of the Acidocella sp. MX-AZ02 graph.\n\n The graph is automatically retrieved from the STRING repository.\t\n\n Parameters\n -------------------\n directed: bool = False\n Wether to load the graph as directed or undirected.\n By default false.\n preprocess: bool = True\n Whether to preprocess the graph to be loaded in \n optimal time and memory.\n load_nodes: bool = True,\n Whether to load the nodes vocabulary or treat the nodes\n simply as a numeric range.\n verbose: int = 2,\n Wether to show loading bars during the retrieval and building\n of the graph.\n cache: bool = True\n Whether to use cache, i.e. download files only once\n and preprocess them only once.\n cache_path: str = "graphs"\n Where to store the downloaded graphs.\n version: str = "links.v11.5"\n The version of the graph to retrieve.\t\t\n\tThe available versions are:\n\t\t\t- homology.v11.5\n\t\t\t- physical.links.v11.5\n\t\t\t- links.v11.5\n additional_graph_kwargs: Dict\n Additional graph kwargs.\n\n Returns\n -----------------------\n Instace of Acidocella sp. MX-AZ02 graph.\n\n\tReferences\n\t---------------------\n\tPlease cite the following if you use the data:\n\t\n\t```bib\n\t@article{szklarczyk2019string,\n\t title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},\n\t author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},\n\t journal={Nucleic acids research},\n\t volume={47},\n\t number={D1},\n\t pages={D607--D613},\n\t year={2019},\n\t publisher={Oxford University Press}\n\t}\n\t```\n '
return AutomaticallyRetrievedGraph(graph_name='AcidocellaSpMxAz02', repository='string', version=version, directed=directed, preprocess=preprocess, load_nodes=load_nodes, verbose=verbose, cache=cache, cache_path=cache_path, additional_graph_kwargs=additional_graph_kwargs)() |
def create_ipython_console(app, title, view_width, view_ht):
' create a iPython console with a rayoptics environment '
opt_model = app.app_manager.model
if opt_model:
ro_env = {'app': app, 'opm': opt_model, 'sm': opt_model.seq_model, 'osp': opt_model.optical_spec, 'pm': opt_model.parax_model}
else:
ro_env = {'app': app, 'opm': opt_model}
ro_setup = 'from rayoptics.environment import *'
ipy_console = ConsoleWidget()
ipy_console.execute_command(ro_setup)
ipy_console.push_vars(ro_env)
mi = ModelInfo(opt_model)
sub_window = app.add_subwindow(ipy_console, mi)
sub_window.setWindowTitle(title)
(orig_x, orig_y) = app.initial_window_offset()
sub_window.setGeometry(orig_x, orig_y, view_width, view_ht)
sub_window.show() | 4,749,318,609,762,118,000 | create a iPython console with a rayoptics environment | src/rayoptics/qtgui/ipyconsole.py | create_ipython_console | NelisW/ray-optics | python | def create_ipython_console(app, title, view_width, view_ht):
' '
opt_model = app.app_manager.model
if opt_model:
ro_env = {'app': app, 'opm': opt_model, 'sm': opt_model.seq_model, 'osp': opt_model.optical_spec, 'pm': opt_model.parax_model}
else:
ro_env = {'app': app, 'opm': opt_model}
ro_setup = 'from rayoptics.environment import *'
ipy_console = ConsoleWidget()
ipy_console.execute_command(ro_setup)
ipy_console.push_vars(ro_env)
mi = ModelInfo(opt_model)
sub_window = app.add_subwindow(ipy_console, mi)
sub_window.setWindowTitle(title)
(orig_x, orig_y) = app.initial_window_offset()
sub_window.setGeometry(orig_x, orig_y, view_width, view_ht)
sub_window.show() |
def push_vars(self, variableDict):
'\n Given a dictionary containing name / value pairs, push those variables\n to the Jupyter console widget\n '
self.kernel_manager.kernel.shell.push(variableDict) | -6,908,998,854,733,955,000 | Given a dictionary containing name / value pairs, push those variables
to the Jupyter console widget | src/rayoptics/qtgui/ipyconsole.py | push_vars | NelisW/ray-optics | python | def push_vars(self, variableDict):
'\n Given a dictionary containing name / value pairs, push those variables\n to the Jupyter console widget\n '
self.kernel_manager.kernel.shell.push(variableDict) |
def clear(self):
'\n Clears the terminal\n '
self._control.clear() | 8,491,764,359,652,678,000 | Clears the terminal | src/rayoptics/qtgui/ipyconsole.py | clear | NelisW/ray-optics | python | def clear(self):
'\n \n '
self._control.clear() |
def print_text(self, text):
'\n Prints some plain text to the console\n '
self._append_plain_text(text) | 3,013,955,330,680,670,700 | Prints some plain text to the console | src/rayoptics/qtgui/ipyconsole.py | print_text | NelisW/ray-optics | python | def print_text(self, text):
'\n \n '
self._append_plain_text(text) |
def execute_command(self, command):
'\n Execute a command in the frame of the console widget\n '
self._execute(command, False) | 2,414,786,539,950,187,500 | Execute a command in the frame of the console widget | src/rayoptics/qtgui/ipyconsole.py | execute_command | NelisW/ray-optics | python | def execute_command(self, command):
'\n \n '
self._execute(command, False) |
def __init__(self, volumes, energies, eos='vinet'):
"Init method.\n\n volumes : array_like\n Unit cell volumes where energies are obtained.\n shape=(volumes, ), dtype='double'.\n energies : array_like\n Energies obtained at volumes.\n shape=(volumes, ), dtype='double'.\n eos : str\n Identifier of equation of states function.\n\n "
self._volumes = volumes
if (np.array(energies).ndim == 1):
self._energies = energies
else:
self._energies = energies[0]
self._eos = get_eos(eos)
self._energy = None
self._bulk_modulus = None
self._b_prime = None
try:
(self._energy, self._bulk_modulus, self._b_prime, self._volume) = fit_to_eos(volumes, self._energies, self._eos)
except TypeError:
msg = [('Failed to fit to "%s" equation of states.' % eos)]
if (len(volumes) < 4):
msg += ['At least 4 volume points are needed for the fitting.']
msg += ['Careful choice of volume points is recommended.']
raise RuntimeError('\n'.join(msg)) | -6,041,298,175,504,865,000 | Init method.
volumes : array_like
Unit cell volumes where energies are obtained.
shape=(volumes, ), dtype='double'.
energies : array_like
Energies obtained at volumes.
shape=(volumes, ), dtype='double'.
eos : str
Identifier of equation of states function. | phonopy/qha/core.py | __init__ | SeyedMohamadMoosavi/phonopy | python | def __init__(self, volumes, energies, eos='vinet'):
"Init method.\n\n volumes : array_like\n Unit cell volumes where energies are obtained.\n shape=(volumes, ), dtype='double'.\n energies : array_like\n Energies obtained at volumes.\n shape=(volumes, ), dtype='double'.\n eos : str\n Identifier of equation of states function.\n\n "
self._volumes = volumes
if (np.array(energies).ndim == 1):
self._energies = energies
else:
self._energies = energies[0]
self._eos = get_eos(eos)
self._energy = None
self._bulk_modulus = None
self._b_prime = None
try:
(self._energy, self._bulk_modulus, self._b_prime, self._volume) = fit_to_eos(volumes, self._energies, self._eos)
except TypeError:
msg = [('Failed to fit to "%s" equation of states.' % eos)]
if (len(volumes) < 4):
msg += ['At least 4 volume points are needed for the fitting.']
msg += ['Careful choice of volume points is recommended.']
raise RuntimeError('\n'.join(msg)) |
@property
def bulk_modulus(self):
'Return bulk modulus.'
return self._bulk_modulus | -2,595,303,705,271,994,000 | Return bulk modulus. | phonopy/qha/core.py | bulk_modulus | SeyedMohamadMoosavi/phonopy | python | @property
def bulk_modulus(self):
return self._bulk_modulus |
def get_bulk_modulus(self):
'Return bulk modulus.'
warnings.warn('BulkModulus.get_bulk_modulus() is deprecated.Use BulkModulus.bulk_modulus attribute.', DeprecationWarning)
return self.bulk_modulus | 6,207,878,060,856,568,000 | Return bulk modulus. | phonopy/qha/core.py | get_bulk_modulus | SeyedMohamadMoosavi/phonopy | python | def get_bulk_modulus(self):
warnings.warn('BulkModulus.get_bulk_modulus() is deprecated.Use BulkModulus.bulk_modulus attribute.', DeprecationWarning)
return self.bulk_modulus |
@property
def equilibrium_volume(self):
'Return volume at equilibrium.'
return self._volume | 4,626,538,268,838,389,000 | Return volume at equilibrium. | phonopy/qha/core.py | equilibrium_volume | SeyedMohamadMoosavi/phonopy | python | @property
def equilibrium_volume(self):
return self._volume |
def get_equilibrium_volume(self):
'Return volume at equilibrium.'
warnings.warn('BulkModulus.get_equilibrium_volume() is deprecated.Use BulkModulus.equilibrium_volume attribute.', DeprecationWarning)
return self.equilibrium_volume | -4,453,012,193,212,020,000 | Return volume at equilibrium. | phonopy/qha/core.py | get_equilibrium_volume | SeyedMohamadMoosavi/phonopy | python | def get_equilibrium_volume(self):
warnings.warn('BulkModulus.get_equilibrium_volume() is deprecated.Use BulkModulus.equilibrium_volume attribute.', DeprecationWarning)
return self.equilibrium_volume |
@property
def b_prime(self):
"Return fitted parameter B'."
return self._b_prime | -6,632,820,823,371,828,000 | Return fitted parameter B'. | phonopy/qha/core.py | b_prime | SeyedMohamadMoosavi/phonopy | python | @property
def b_prime(self):
return self._b_prime |
def get_b_prime(self):
"Return fitted parameter B'."
warnings.warn('BulkModulus.get_b_prime() is deprecated.Use BulkModulus.b_prime attribute.', DeprecationWarning)
return self._b_prime | -8,431,160,056,754,322,000 | Return fitted parameter B'. | phonopy/qha/core.py | get_b_prime | SeyedMohamadMoosavi/phonopy | python | def get_b_prime(self):
warnings.warn('BulkModulus.get_b_prime() is deprecated.Use BulkModulus.b_prime attribute.', DeprecationWarning)
return self._b_prime |
@property
def energy(self):
'Return fitted parameter of energy.'
return self._energy | -1,284,467,124,829,295,900 | Return fitted parameter of energy. | phonopy/qha/core.py | energy | SeyedMohamadMoosavi/phonopy | python | @property
def energy(self):
return self._energy |
def get_energy(self):
'Return fitted parameter of energy.'
warnings.warn('BulkModulus.get_energy() is deprecated.Use BulkModulus.energy attribute.', DeprecationWarning)
return self._energy | -778,690,476,186,869,600 | Return fitted parameter of energy. | phonopy/qha/core.py | get_energy | SeyedMohamadMoosavi/phonopy | python | def get_energy(self):
warnings.warn('BulkModulus.get_energy() is deprecated.Use BulkModulus.energy attribute.', DeprecationWarning)
return self._energy |
def get_parameters(self):
'Return fitted parameters.'
return (self._energy, self._bulk_modulus, self._b_prime, self._volume) | -1,417,737,956,995,839,500 | Return fitted parameters. | phonopy/qha/core.py | get_parameters | SeyedMohamadMoosavi/phonopy | python | def get_parameters(self):
return (self._energy, self._bulk_modulus, self._b_prime, self._volume) |
def get_eos(self):
'Return EOS function as a python method.'
warnings.warn('BulkModulus.get_eos() is deprecated.', DeprecationWarning)
return self._eos | -5,511,940,530,130,078,000 | Return EOS function as a python method. | phonopy/qha/core.py | get_eos | SeyedMohamadMoosavi/phonopy | python | def get_eos(self):
warnings.warn('BulkModulus.get_eos() is deprecated.', DeprecationWarning)
return self._eos |
def plot(self):
'Plot fitted EOS curve.'
import matplotlib.pyplot as plt
ep = self.get_parameters()
vols = self._volumes
volume_points = np.linspace(min(vols), max(vols), 201)
(fig, ax) = plt.subplots()
ax.plot(volume_points, self._eos(volume_points, *ep), 'r-')
ax.plot(vols, self._energies, 'bo', markersize=4)
return plt | 450,993,508,511,894,140 | Plot fitted EOS curve. | phonopy/qha/core.py | plot | SeyedMohamadMoosavi/phonopy | python | def plot(self):
import matplotlib.pyplot as plt
ep = self.get_parameters()
vols = self._volumes
volume_points = np.linspace(min(vols), max(vols), 201)
(fig, ax) = plt.subplots()
ax.plot(volume_points, self._eos(volume_points, *ep), 'r-')
ax.plot(vols, self._energies, 'bo', markersize=4)
return plt |
def __init__(self, volumes, electronic_energies, temperatures, cv, entropy, fe_phonon, eos='vinet', t_max=None, energy_plot_factor=None):
"Init method.\n\n Parameters\n ----------\n volumes: array_like\n Unit cell volumes (V) in angstrom^3.\n dtype='double'\n shape=(volumes,)\n electronic_energies: array_like\n Electronic energies (U_el) or electronic free energies (F_el) in eV.\n It is assumed as formar if ndim==1 and latter if ndim==2.\n dtype='double'\n shape=(volumes,) or (temperatuers, volumes)\n temperatures: array_like\n Temperatures ascending order (T) in K.\n dtype='double'\n shape=(temperatures,)\n cv: array_like\n Phonon Heat capacity at constant volume in J/K/mol.\n dtype='double'\n shape=(temperatuers, volumes)\n entropy: array_like\n Phonon entropy at constant volume (S_ph) in J/K/mol.\n dtype='double'\n shape=(temperatuers, volumes)\n fe_phonon: array_like\n Phonon Helmholtz free energy (F_ph) in kJ/mol.\n dtype='double'\n shape=(temperatuers, volumes)\n eos: str\n Equation of state used for fitting F vs V.\n 'vinet', 'murnaghan' or 'birch_murnaghan'.\n t_max: float\n Maximum temperature to be calculated. This has to be not\n greater than the temperature of the third element from the\n end of 'temperatre' elements. If max_t=None, the temperature\n of the third element from the end is used.\n energy_plot_factor: float\n This value is multiplied to energy like values only in plotting.\n\n "
self._volumes = np.array(volumes)
self._electronic_energies = np.array(electronic_energies)
self._all_temperatures = np.array(temperatures)
self._cv = np.array(cv)
self._entropy = np.array(entropy)
self._fe_phonon = (np.array(fe_phonon) / EvTokJmol)
self._eos = get_eos(eos)
self._t_max = t_max
self._energy_plot_factor = energy_plot_factor
self._temperatures = None
self._equiv_volumes = None
self._equiv_energies = None
self._equiv_bulk_modulus = None
self._equiv_parameters = None
self._free_energies = None
self._num_elems = None
self._thermal_expansions = None
self._cp_numerical = None
self._volume_entropy_parameters = None
self._volume_cv_parameters = None
self._volume_entropy = None
self._volume_cv = None
self._cp_polyfit = None
self._dsdv = None
self._gruneisen_parameters = None
self._len = None | 5,790,105,411,908,088,000 | Init method.
Parameters
----------
volumes: array_like
Unit cell volumes (V) in angstrom^3.
dtype='double'
shape=(volumes,)
electronic_energies: array_like
Electronic energies (U_el) or electronic free energies (F_el) in eV.
It is assumed as formar if ndim==1 and latter if ndim==2.
dtype='double'
shape=(volumes,) or (temperatuers, volumes)
temperatures: array_like
Temperatures ascending order (T) in K.
dtype='double'
shape=(temperatures,)
cv: array_like
Phonon Heat capacity at constant volume in J/K/mol.
dtype='double'
shape=(temperatuers, volumes)
entropy: array_like
Phonon entropy at constant volume (S_ph) in J/K/mol.
dtype='double'
shape=(temperatuers, volumes)
fe_phonon: array_like
Phonon Helmholtz free energy (F_ph) in kJ/mol.
dtype='double'
shape=(temperatuers, volumes)
eos: str
Equation of state used for fitting F vs V.
'vinet', 'murnaghan' or 'birch_murnaghan'.
t_max: float
Maximum temperature to be calculated. This has to be not
greater than the temperature of the third element from the
end of 'temperatre' elements. If max_t=None, the temperature
of the third element from the end is used.
energy_plot_factor: float
This value is multiplied to energy like values only in plotting. | phonopy/qha/core.py | __init__ | SeyedMohamadMoosavi/phonopy | python | def __init__(self, volumes, electronic_energies, temperatures, cv, entropy, fe_phonon, eos='vinet', t_max=None, energy_plot_factor=None):
"Init method.\n\n Parameters\n ----------\n volumes: array_like\n Unit cell volumes (V) in angstrom^3.\n dtype='double'\n shape=(volumes,)\n electronic_energies: array_like\n Electronic energies (U_el) or electronic free energies (F_el) in eV.\n It is assumed as formar if ndim==1 and latter if ndim==2.\n dtype='double'\n shape=(volumes,) or (temperatuers, volumes)\n temperatures: array_like\n Temperatures ascending order (T) in K.\n dtype='double'\n shape=(temperatures,)\n cv: array_like\n Phonon Heat capacity at constant volume in J/K/mol.\n dtype='double'\n shape=(temperatuers, volumes)\n entropy: array_like\n Phonon entropy at constant volume (S_ph) in J/K/mol.\n dtype='double'\n shape=(temperatuers, volumes)\n fe_phonon: array_like\n Phonon Helmholtz free energy (F_ph) in kJ/mol.\n dtype='double'\n shape=(temperatuers, volumes)\n eos: str\n Equation of state used for fitting F vs V.\n 'vinet', 'murnaghan' or 'birch_murnaghan'.\n t_max: float\n Maximum temperature to be calculated. This has to be not\n greater than the temperature of the third element from the\n end of 'temperatre' elements. If max_t=None, the temperature\n of the third element from the end is used.\n energy_plot_factor: float\n This value is multiplied to energy like values only in plotting.\n\n "
self._volumes = np.array(volumes)
self._electronic_energies = np.array(electronic_energies)
self._all_temperatures = np.array(temperatures)
self._cv = np.array(cv)
self._entropy = np.array(entropy)
self._fe_phonon = (np.array(fe_phonon) / EvTokJmol)
self._eos = get_eos(eos)
self._t_max = t_max
self._energy_plot_factor = energy_plot_factor
self._temperatures = None
self._equiv_volumes = None
self._equiv_energies = None
self._equiv_bulk_modulus = None
self._equiv_parameters = None
self._free_energies = None
self._num_elems = None
self._thermal_expansions = None
self._cp_numerical = None
self._volume_entropy_parameters = None
self._volume_cv_parameters = None
self._volume_entropy = None
self._volume_cv = None
self._cp_polyfit = None
self._dsdv = None
self._gruneisen_parameters = None
self._len = None |
@property
def thermal_expansion(self):
'Return volumetric thermal expansion coefficients at temperatures.'
return self._thermal_expansions[:self._len] | 6,564,147,274,133,580,000 | Return volumetric thermal expansion coefficients at temperatures. | phonopy/qha/core.py | thermal_expansion | SeyedMohamadMoosavi/phonopy | python | @property
def thermal_expansion(self):
return self._thermal_expansions[:self._len] |
@property
def helmholtz_volume(self):
'Return Helmholtz free energies at temperatures and volumes.'
return self._free_energies[:self._len] | 5,229,335,254,799,800,000 | Return Helmholtz free energies at temperatures and volumes. | phonopy/qha/core.py | helmholtz_volume | SeyedMohamadMoosavi/phonopy | python | @property
def helmholtz_volume(self):
return self._free_energies[:self._len] |
@property
def volume_temperature(self):
'Return equilibrium volumes at temperatures.'
return self._equiv_volumes[:self._len] | 5,589,368,534,607,171,000 | Return equilibrium volumes at temperatures. | phonopy/qha/core.py | volume_temperature | SeyedMohamadMoosavi/phonopy | python | @property
def volume_temperature(self):
return self._equiv_volumes[:self._len] |
@property
def gibbs_temperature(self):
'Return Gibbs free energies at temperatures.'
return self._equiv_energies[:self._len] | 3,348,669,770,639,401,000 | Return Gibbs free energies at temperatures. | phonopy/qha/core.py | gibbs_temperature | SeyedMohamadMoosavi/phonopy | python | @property
def gibbs_temperature(self):
return self._equiv_energies[:self._len] |
@property
def bulk_modulus_temperature(self):
'Return bulk modulus vs temperature data.'
return self._equiv_bulk_modulus[:self._len] | 284,408,110,806,463,400 | Return bulk modulus vs temperature data. | phonopy/qha/core.py | bulk_modulus_temperature | SeyedMohamadMoosavi/phonopy | python | @property
def bulk_modulus_temperature(self):
return self._equiv_bulk_modulus[:self._len] |
@property
def heat_capacity_P_numerical(self):
'Return heat capacities at constant pressure at temperatures.\n\n Values are computed by numerical derivative of Gibbs free energy.\n\n '
return self._cp_numerical[:self._len] | 5,557,818,067,728,385,000 | Return heat capacities at constant pressure at temperatures.
Values are computed by numerical derivative of Gibbs free energy. | phonopy/qha/core.py | heat_capacity_P_numerical | SeyedMohamadMoosavi/phonopy | python | @property
def heat_capacity_P_numerical(self):
'Return heat capacities at constant pressure at temperatures.\n\n Values are computed by numerical derivative of Gibbs free energy.\n\n '
return self._cp_numerical[:self._len] |
@property
def heat_capacity_P_polyfit(self):
'Return heat capacities at constant pressure at temperatures.\n\n Volumes are computed in another way to heat_capacity_P_numerical\n for the better numerical behaviour. But this does not work\n when temperature dependent electronic_energies is supplied.\n\n '
if (self._electronic_energies.ndim == 1):
return self._cp_polyfit[:self._len]
else:
return None | -8,213,427,396,810,456,000 | Return heat capacities at constant pressure at temperatures.
Volumes are computed in another way to heat_capacity_P_numerical
for the better numerical behaviour. But this does not work
when temperature dependent electronic_energies is supplied. | phonopy/qha/core.py | heat_capacity_P_polyfit | SeyedMohamadMoosavi/phonopy | python | @property
def heat_capacity_P_polyfit(self):
'Return heat capacities at constant pressure at temperatures.\n\n Volumes are computed in another way to heat_capacity_P_numerical\n for the better numerical behaviour. But this does not work\n when temperature dependent electronic_energies is supplied.\n\n '
if (self._electronic_energies.ndim == 1):
return self._cp_polyfit[:self._len]
else:
return None |
@property
def gruneisen_temperature(self):
'Return Gruneisen parameters at temperatures.'
return self._gruneisen_parameters[:self._len] | -6,513,037,188,215,679,000 | Return Gruneisen parameters at temperatures. | phonopy/qha/core.py | gruneisen_temperature | SeyedMohamadMoosavi/phonopy | python | @property
def gruneisen_temperature(self):
return self._gruneisen_parameters[:self._len] |
def run(self, verbose=False):
"Fit parameters to EOS at temperatures.\n\n Even if fitting failed, simply omit the volume point. In this case,\n the failed temperature point doesn't exist in the returned arrays.\n\n "
if verbose:
print((('#%11s' + ('%14s' * 4)) % ('T', 'E_0', 'B_0', "B'_0", 'V_0')))
num_elems = (self._get_num_elems(self._all_temperatures) + 1)
if (num_elems > len(self._all_temperatures)):
num_elems -= 1
temperatures = []
parameters = []
free_energies = []
for i in range(num_elems):
if (self._electronic_energies.ndim == 1):
el_energy = self._electronic_energies
else:
el_energy = self._electronic_energies[i]
fe = [(ph_e + el_e) for (ph_e, el_e) in zip(self._fe_phonon[i], el_energy)]
try:
ep = fit_to_eos(self._volumes, fe, self._eos)
except TypeError:
print(('Fitting failure at T=%.1f' % self._all_temperatures[i]))
if (ep is None):
continue
else:
[ee, eb, ebp, ev] = ep
t = self._all_temperatures[i]
temperatures.append(t)
parameters.append(ep)
free_energies.append(fe)
if verbose:
print((('%14.6f' * 5) % (t, ep[0], (ep[1] * EVAngstromToGPa), ep[2], ep[3])))
self._free_energies = np.array(free_energies)
self._temperatures = np.array(temperatures)
self._equiv_parameters = np.array(parameters)
self._equiv_volumes = np.array(self._equiv_parameters[:, 3])
self._equiv_energies = np.array(self._equiv_parameters[:, 0])
self._equiv_bulk_modulus = np.array((self._equiv_parameters[:, 1] * EVAngstromToGPa))
self._num_elems = len(self._temperatures)
self._set_thermal_expansion()
self._set_heat_capacity_P_numerical()
self._set_heat_capacity_P_polyfit()
self._set_gruneisen_parameter()
self._len = len(self._thermal_expansions)
assert ((self._len + 1) == self._num_elems) | -7,183,663,002,175,828,000 | Fit parameters to EOS at temperatures.
Even if fitting failed, simply omit the volume point. In this case,
the failed temperature point doesn't exist in the returned arrays. | phonopy/qha/core.py | run | SeyedMohamadMoosavi/phonopy | python | def run(self, verbose=False):
"Fit parameters to EOS at temperatures.\n\n Even if fitting failed, simply omit the volume point. In this case,\n the failed temperature point doesn't exist in the returned arrays.\n\n "
if verbose:
print((('#%11s' + ('%14s' * 4)) % ('T', 'E_0', 'B_0', "B'_0", 'V_0')))
num_elems = (self._get_num_elems(self._all_temperatures) + 1)
if (num_elems > len(self._all_temperatures)):
num_elems -= 1
temperatures = []
parameters = []
free_energies = []
for i in range(num_elems):
if (self._electronic_energies.ndim == 1):
el_energy = self._electronic_energies
else:
el_energy = self._electronic_energies[i]
fe = [(ph_e + el_e) for (ph_e, el_e) in zip(self._fe_phonon[i], el_energy)]
try:
ep = fit_to_eos(self._volumes, fe, self._eos)
except TypeError:
print(('Fitting failure at T=%.1f' % self._all_temperatures[i]))
if (ep is None):
continue
else:
[ee, eb, ebp, ev] = ep
t = self._all_temperatures[i]
temperatures.append(t)
parameters.append(ep)
free_energies.append(fe)
if verbose:
print((('%14.6f' * 5) % (t, ep[0], (ep[1] * EVAngstromToGPa), ep[2], ep[3])))
self._free_energies = np.array(free_energies)
self._temperatures = np.array(temperatures)
self._equiv_parameters = np.array(parameters)
self._equiv_volumes = np.array(self._equiv_parameters[:, 3])
self._equiv_energies = np.array(self._equiv_parameters[:, 0])
self._equiv_bulk_modulus = np.array((self._equiv_parameters[:, 1] * EVAngstromToGPa))
self._num_elems = len(self._temperatures)
self._set_thermal_expansion()
self._set_heat_capacity_P_numerical()
self._set_heat_capacity_P_polyfit()
self._set_gruneisen_parameter()
self._len = len(self._thermal_expansions)
assert ((self._len + 1) == self._num_elems) |
def get_field_type(field, include_role=True):
'\n Get the type of a field including the correct intersphinx mappings.\n\n :param field: The field\n :type field: ~django.db.models.Field\n\n :param include_directive: Whether or not the role :any:`py:class` should be included\n :type include_directive: bool\n\n :return: The type of the field\n :rtype: str\n '
if isinstance(field, models.fields.related.RelatedField):
if isinstance(field.remote_field.model, str):
to = field.remote_field.model
else:
to = f'{field.remote_field.model.__module__}.{field.remote_field.model.__name__}'
return f':class:`~{type(field).__module__}.{type(field).__name__}` to :class:`~{to}`'
elif isinstance(field, models.fields.reverse_related.ForeignObjectRel):
to = field.remote_field.model
return f'Reverse :class:`~{type(field.remote_field).__module__}.{type(field.remote_field).__name__}` from :class:`~{to.__module__}.{to.__name__}`'
elif include_role:
return f':class:`~{type(field).__module__}.{type(field).__name__}`'
else:
return f'~{type(field).__module__}.{type(field).__name__}' | 7,352,132,743,437,656,000 | Get the type of a field including the correct intersphinx mappings.
:param field: The field
:type field: ~django.db.models.Field
:param include_directive: Whether or not the role :any:`py:class` should be included
:type include_directive: bool
:return: The type of the field
:rtype: str | sphinxcontrib_django2/docstrings/field_utils.py | get_field_type | mkalioby/sphinxcontrib-django2 | python | def get_field_type(field, include_role=True):
'\n Get the type of a field including the correct intersphinx mappings.\n\n :param field: The field\n :type field: ~django.db.models.Field\n\n :param include_directive: Whether or not the role :any:`py:class` should be included\n :type include_directive: bool\n\n :return: The type of the field\n :rtype: str\n '
if isinstance(field, models.fields.related.RelatedField):
if isinstance(field.remote_field.model, str):
to = field.remote_field.model
else:
to = f'{field.remote_field.model.__module__}.{field.remote_field.model.__name__}'
return f':class:`~{type(field).__module__}.{type(field).__name__}` to :class:`~{to}`'
elif isinstance(field, models.fields.reverse_related.ForeignObjectRel):
to = field.remote_field.model
return f'Reverse :class:`~{type(field.remote_field).__module__}.{type(field.remote_field).__name__}` from :class:`~{to.__module__}.{to.__name__}`'
elif include_role:
return f':class:`~{type(field).__module__}.{type(field).__name__}`'
else:
return f'~{type(field).__module__}.{type(field).__name__}' |
def get_field_verbose_name(field):
'\n Get the verbose name of the field.\n If the field has a ``help_text``, it is also included.\n\n In case the field is a related field, the ``related_name`` is used to link to the remote model.\n For reverse related fields, the originating field is linked.\n\n :param field: The field\n :type field: ~django.db.models.Field\n '
help_text = ''
if isinstance(field, models.fields.reverse_related.ForeignObjectRel):
related_name = (field.related_name.replace('_', ' ') if field.related_name else None)
if isinstance(field, models.fields.reverse_related.OneToOneRel):
related_name = (related_name or field.remote_field.model._meta.verbose_name)
verbose_name = f'The {related_name} of this {field.model._meta.verbose_name}'
else:
related_name = (related_name or field.remote_field.model._meta.verbose_name_plural)
verbose_name = f'All {related_name} of this {field.model._meta.verbose_name}'
verbose_name += f' (related name of :attr:`~{field.remote_field.model.__module__}.{field.remote_field.model.__name__}.{field.remote_field.name}`)'
elif isinstance(field, contenttypes.fields.GenericForeignKey):
return f'Generic foreign key to the :class:`~django.contrib.contenttypes.models.ContentType` specified in :attr:`~{field.model.__module__}.{field.model.__name__}.{field.ct_field}`'
else:
primary_key = ('Primary key: ' if field.primary_key else '')
field_verbose_name = force_str(field.verbose_name)
verbose_name = ((primary_key + field_verbose_name[:1].upper()) + field_verbose_name[1:])
help_text = force_str(field.help_text)
if help_text:
if (not verbose_name.endswith('.')):
verbose_name += '. '
verbose_name += help_text
if isinstance(field, models.fields.related.RelatedField):
to = field.remote_field.model
if isinstance(to, str):
if ('.' in to):
to = apps.get_model(to)
elif (to == 'self'):
to = field.model
else:
to = apps.get_model(field.model._meta.app_label, to)
if hasattr(field.remote_field, 'related_name'):
related_name = (field.remote_field.related_name or field.model.__name__.lower())
verbose_name += f' (related name: :attr:`~{to.__module__}.{to.__name__}.{related_name}`)'
return verbose_name | 8,593,726,823,584,610,000 | Get the verbose name of the field.
If the field has a ``help_text``, it is also included.
In case the field is a related field, the ``related_name`` is used to link to the remote model.
For reverse related fields, the originating field is linked.
:param field: The field
:type field: ~django.db.models.Field | sphinxcontrib_django2/docstrings/field_utils.py | get_field_verbose_name | mkalioby/sphinxcontrib-django2 | python | def get_field_verbose_name(field):
'\n Get the verbose name of the field.\n If the field has a ``help_text``, it is also included.\n\n In case the field is a related field, the ``related_name`` is used to link to the remote model.\n For reverse related fields, the originating field is linked.\n\n :param field: The field\n :type field: ~django.db.models.Field\n '
help_text =
if isinstance(field, models.fields.reverse_related.ForeignObjectRel):
related_name = (field.related_name.replace('_', ' ') if field.related_name else None)
if isinstance(field, models.fields.reverse_related.OneToOneRel):
related_name = (related_name or field.remote_field.model._meta.verbose_name)
verbose_name = f'The {related_name} of this {field.model._meta.verbose_name}'
else:
related_name = (related_name or field.remote_field.model._meta.verbose_name_plural)
verbose_name = f'All {related_name} of this {field.model._meta.verbose_name}'
verbose_name += f' (related name of :attr:`~{field.remote_field.model.__module__}.{field.remote_field.model.__name__}.{field.remote_field.name}`)'
elif isinstance(field, contenttypes.fields.GenericForeignKey):
return f'Generic foreign key to the :class:`~django.contrib.contenttypes.models.ContentType` specified in :attr:`~{field.model.__module__}.{field.model.__name__}.{field.ct_field}`'
else:
primary_key = ('Primary key: ' if field.primary_key else )
field_verbose_name = force_str(field.verbose_name)
verbose_name = ((primary_key + field_verbose_name[:1].upper()) + field_verbose_name[1:])
help_text = force_str(field.help_text)
if help_text:
if (not verbose_name.endswith('.')):
verbose_name += '. '
verbose_name += help_text
if isinstance(field, models.fields.related.RelatedField):
to = field.remote_field.model
if isinstance(to, str):
if ('.' in to):
to = apps.get_model(to)
elif (to == 'self'):
to = field.model
else:
to = apps.get_model(field.model._meta.app_label, to)
if hasattr(field.remote_field, 'related_name'):
related_name = (field.remote_field.related_name or field.model.__name__.lower())
verbose_name += f' (related name: :attr:`~{to.__module__}.{to.__name__}.{related_name}`)'
return verbose_name |
def fact(name=None):
'Output a fact about factorials.\n\n Args:\n name: A name for the operation (optional).\n\n Returns:\n A `Tensor` of type `string`.\n '
_ctx = _context._context
if ((_ctx is None) or (not _ctx._eager_context.is_eager)):
(_, _, _op) = _op_def_lib._apply_op_helper('Fact', name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = None
_execute.record_gradient('Fact', _inputs_flat, _attrs, _result, name)
(_result,) = _result
return _result
else:
try:
_result = _pywrap_tensorflow.TFE_Py_FastPathExecute(_ctx._context_handle, _ctx._eager_context.device_name, 'Fact', name, _ctx._post_execution_callbacks)
return _result
except _core._FallbackException:
return fact_eager_fallback(name=name, ctx=_ctx)
except _core._NotOkStatusException as e:
if (name is not None):
message = ((e.message + ' name: ') + name)
else:
message = e.message
_six.raise_from(_core._status_to_exception(e.code, message), None) | -2,174,702,402,554,505,700 | Output a fact about factorials.
Args:
name: A name for the operation (optional).
Returns:
A `Tensor` of type `string`. | venv/Lib/site-packages/tensorflow/python/ops/gen_user_ops.py | fact | caiovini/Image_reader_api | python | def fact(name=None):
'Output a fact about factorials.\n\n Args:\n name: A name for the operation (optional).\n\n Returns:\n A `Tensor` of type `string`.\n '
_ctx = _context._context
if ((_ctx is None) or (not _ctx._eager_context.is_eager)):
(_, _, _op) = _op_def_lib._apply_op_helper('Fact', name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = None
_execute.record_gradient('Fact', _inputs_flat, _attrs, _result, name)
(_result,) = _result
return _result
else:
try:
_result = _pywrap_tensorflow.TFE_Py_FastPathExecute(_ctx._context_handle, _ctx._eager_context.device_name, 'Fact', name, _ctx._post_execution_callbacks)
return _result
except _core._FallbackException:
return fact_eager_fallback(name=name, ctx=_ctx)
except _core._NotOkStatusException as e:
if (name is not None):
message = ((e.message + ' name: ') + name)
else:
message = e.message
_six.raise_from(_core._status_to_exception(e.code, message), None) |
def fact_eager_fallback(name=None, ctx=None):
'This is the slowpath function for Eager mode.\n This is for function fact\n '
_ctx = (ctx if ctx else _context.context())
_inputs_flat = []
_attrs = None
_result = _execute.execute(b'Fact', 1, inputs=_inputs_flat, attrs=_attrs, ctx=_ctx, name=name)
_execute.record_gradient('Fact', _inputs_flat, _attrs, _result, name)
(_result,) = _result
return _result | 8,948,339,619,584,408,000 | This is the slowpath function for Eager mode.
This is for function fact | venv/Lib/site-packages/tensorflow/python/ops/gen_user_ops.py | fact_eager_fallback | caiovini/Image_reader_api | python | def fact_eager_fallback(name=None, ctx=None):
'This is the slowpath function for Eager mode.\n This is for function fact\n '
_ctx = (ctx if ctx else _context.context())
_inputs_flat = []
_attrs = None
_result = _execute.execute(b'Fact', 1, inputs=_inputs_flat, attrs=_attrs, ctx=_ctx, name=name)
_execute.record_gradient('Fact', _inputs_flat, _attrs, _result, name)
(_result,) = _result
return _result |
def introduction():
'Prints out introductory statements at start of run.'
print('Lennard-Jones potential')
print('Cut-and-shifted version for dynamics')
print('Cut (but not shifted) version also calculated')
print('Diameter, sigma = 1')
print('Well depth, epsilon = 1')
if fast:
print('Fast NumPy force routine')
else:
print('Slow Python force routine') | -3,767,502,228,122,722,300 | Prints out introductory statements at start of run. | python_examples/md_lj_module.py | introduction | Allen-Tildesley/examples | python | def introduction():
print('Lennard-Jones potential')
print('Cut-and-shifted version for dynamics')
print('Cut (but not shifted) version also calculated')
print('Diameter, sigma = 1')
print('Well depth, epsilon = 1')
if fast:
print('Fast NumPy force routine')
else:
print('Slow Python force routine') |
def conclusion():
'Prints out concluding statements at end of run.'
print('Program ends') | 7,885,841,679,723,434,000 | Prints out concluding statements at end of run. | python_examples/md_lj_module.py | conclusion | Allen-Tildesley/examples | python | def conclusion():
print('Program ends') |
def force(box, r_cut, r):
'Takes in box, cutoff range, and coordinate array, and calculates forces and potentials etc.'
import numpy as np
(n, d) = r.shape
assert (d == 3), 'Dimension error in force'
sr2_ovr = 1.77
r_cut_box = (r_cut / box)
r_cut_box_sq = (r_cut_box ** 2)
box_sq = (box ** 2)
sr2 = (1.0 / (r_cut ** 2))
sr6 = (sr2 ** 3)
sr12 = (sr6 ** 2)
pot_cut = (sr12 - sr6)
f = np.zeros_like(r)
total = PotentialType(cut=0.0, pot=0.0, vir=0.0, lap=0.0, ovr=False)
if fast:
for i in range((n - 1)):
rij = (r[i, :] - r[(i + 1):, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2), axis=1)
in_range = (rij_sq < r_cut_box_sq)
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
sr2 = np.where(in_range, (1.0 / rij_sq), 0.0)
ovr = (sr2 > sr2_ovr)
sr6 = (sr2 ** 3)
sr12 = (sr6 ** 2)
cut = (sr12 - sr6)
vir = (cut + sr12)
pot = np.where(in_range, (cut - pot_cut), 0.0)
lap = (((22.0 * sr12) - (5.0 * sr6)) * sr2)
fij = (vir * sr2)
fij = (rij * fij[:, np.newaxis])
total = (total + PotentialType(cut=np.sum(cut), pot=np.sum(pot), vir=np.sum(vir), lap=np.sum(lap), ovr=np.any(ovr)))
f[i, :] = (f[i, :] + np.sum(fij, axis=0))
f[(i + 1):, :] = (f[(i + 1):, :] - fij)
else:
for i in range((n - 1)):
for j in range((i + 1), n):
rij = (r[i, :] - r[j, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2))
if (rij_sq < r_cut_box_sq):
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
sr2 = (1.0 / rij_sq)
ovr = (sr2 > sr2_ovr)
sr6 = (sr2 ** 3)
sr12 = (sr6 ** 2)
cut = (sr12 - sr6)
vir = (cut + sr12)
pot = (cut - pot_cut)
lap = (((22.0 * sr12) - (5.0 * sr6)) * sr2)
fij = ((rij * vir) * sr2)
total = (total + PotentialType(cut=cut, pot=pot, vir=vir, lap=lap, ovr=ovr))
f[i, :] = (f[i, :] + fij)
f[j, :] = (f[j, :] - fij)
f = (f * 24.0)
total.cut = (total.cut * 4.0)
total.pot = (total.pot * 4.0)
total.vir = ((total.vir * 24.0) / 3.0)
total.lap = ((total.lap * 24.0) * 2.0)
return (total, f) | -3,670,322,586,466,179,000 | Takes in box, cutoff range, and coordinate array, and calculates forces and potentials etc. | python_examples/md_lj_module.py | force | Allen-Tildesley/examples | python | def force(box, r_cut, r):
import numpy as np
(n, d) = r.shape
assert (d == 3), 'Dimension error in force'
sr2_ovr = 1.77
r_cut_box = (r_cut / box)
r_cut_box_sq = (r_cut_box ** 2)
box_sq = (box ** 2)
sr2 = (1.0 / (r_cut ** 2))
sr6 = (sr2 ** 3)
sr12 = (sr6 ** 2)
pot_cut = (sr12 - sr6)
f = np.zeros_like(r)
total = PotentialType(cut=0.0, pot=0.0, vir=0.0, lap=0.0, ovr=False)
if fast:
for i in range((n - 1)):
rij = (r[i, :] - r[(i + 1):, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2), axis=1)
in_range = (rij_sq < r_cut_box_sq)
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
sr2 = np.where(in_range, (1.0 / rij_sq), 0.0)
ovr = (sr2 > sr2_ovr)
sr6 = (sr2 ** 3)
sr12 = (sr6 ** 2)
cut = (sr12 - sr6)
vir = (cut + sr12)
pot = np.where(in_range, (cut - pot_cut), 0.0)
lap = (((22.0 * sr12) - (5.0 * sr6)) * sr2)
fij = (vir * sr2)
fij = (rij * fij[:, np.newaxis])
total = (total + PotentialType(cut=np.sum(cut), pot=np.sum(pot), vir=np.sum(vir), lap=np.sum(lap), ovr=np.any(ovr)))
f[i, :] = (f[i, :] + np.sum(fij, axis=0))
f[(i + 1):, :] = (f[(i + 1):, :] - fij)
else:
for i in range((n - 1)):
for j in range((i + 1), n):
rij = (r[i, :] - r[j, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2))
if (rij_sq < r_cut_box_sq):
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
sr2 = (1.0 / rij_sq)
ovr = (sr2 > sr2_ovr)
sr6 = (sr2 ** 3)
sr12 = (sr6 ** 2)
cut = (sr12 - sr6)
vir = (cut + sr12)
pot = (cut - pot_cut)
lap = (((22.0 * sr12) - (5.0 * sr6)) * sr2)
fij = ((rij * vir) * sr2)
total = (total + PotentialType(cut=cut, pot=pot, vir=vir, lap=lap, ovr=ovr))
f[i, :] = (f[i, :] + fij)
f[j, :] = (f[j, :] - fij)
f = (f * 24.0)
total.cut = (total.cut * 4.0)
total.pot = (total.pot * 4.0)
total.vir = ((total.vir * 24.0) / 3.0)
total.lap = ((total.lap * 24.0) * 2.0)
return (total, f) |
def hessian(box, r_cut, r, f):
'Calculates Hessian function (for 1/N correction to config temp).'
import numpy as np
(n, d) = r.shape
assert (d == 3), 'Dimension error in hessian'
assert np.all((r.shape == f.shape)), 'Dimension mismatch in hessian'
r_cut_box = (r_cut / box)
r_cut_box_sq = (r_cut_box ** 2)
box_sq = (box ** 2)
hes = 0.0
if fast:
for i in range((n - 1)):
rij = (r[i, :] - r[(i + 1):, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2), axis=1)
in_range = (rij_sq < r_cut_box_sq)
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
fij = (f[i, :] - f[(i + 1):, :])
ff = np.sum((fij * fij), axis=1)
rf = np.sum((rij * fij), axis=1)
sr2 = np.where(in_range, (1.0 / rij_sq), 0.0)
sr6 = (sr2 ** 3)
sr8 = (sr6 * sr2)
sr10 = (sr8 * sr2)
v1 = ((24.0 * (1.0 - (2.0 * sr6))) * sr8)
v2 = ((96.0 * ((7.0 * sr6) - 2.0)) * sr10)
hes = ((hes + np.sum((v1 * ff))) + np.sum((v2 * (rf ** 2))))
else:
for i in range((n - 1)):
for j in range((i + 1), n):
rij = (r[i, :] - r[j, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2))
if (rij_sq < r_cut_box_sq):
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
fij = (f[i, :] - f[j, :])
ff = np.dot(fij, fij)
rf = np.dot(rij, fij)
sr2 = (1.0 / rij_sq)
sr6 = (sr2 ** 3)
sr8 = (sr6 * sr2)
sr10 = (sr8 * sr2)
v1 = ((24.0 * (1.0 - (2.0 * sr6))) * sr8)
v2 = ((96.0 * ((7.0 * sr6) - 2.0)) * sr10)
hes = ((hes + (v1 * ff)) + (v2 * (rf ** 2)))
return hes | 2,891,517,025,663,908,000 | Calculates Hessian function (for 1/N correction to config temp). | python_examples/md_lj_module.py | hessian | Allen-Tildesley/examples | python | def hessian(box, r_cut, r, f):
import numpy as np
(n, d) = r.shape
assert (d == 3), 'Dimension error in hessian'
assert np.all((r.shape == f.shape)), 'Dimension mismatch in hessian'
r_cut_box = (r_cut / box)
r_cut_box_sq = (r_cut_box ** 2)
box_sq = (box ** 2)
hes = 0.0
if fast:
for i in range((n - 1)):
rij = (r[i, :] - r[(i + 1):, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2), axis=1)
in_range = (rij_sq < r_cut_box_sq)
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
fij = (f[i, :] - f[(i + 1):, :])
ff = np.sum((fij * fij), axis=1)
rf = np.sum((rij * fij), axis=1)
sr2 = np.where(in_range, (1.0 / rij_sq), 0.0)
sr6 = (sr2 ** 3)
sr8 = (sr6 * sr2)
sr10 = (sr8 * sr2)
v1 = ((24.0 * (1.0 - (2.0 * sr6))) * sr8)
v2 = ((96.0 * ((7.0 * sr6) - 2.0)) * sr10)
hes = ((hes + np.sum((v1 * ff))) + np.sum((v2 * (rf ** 2))))
else:
for i in range((n - 1)):
for j in range((i + 1), n):
rij = (r[i, :] - r[j, :])
rij = (rij - np.rint(rij))
rij_sq = np.sum((rij ** 2))
if (rij_sq < r_cut_box_sq):
rij_sq = (rij_sq * box_sq)
rij = (rij * box)
fij = (f[i, :] - f[j, :])
ff = np.dot(fij, fij)
rf = np.dot(rij, fij)
sr2 = (1.0 / rij_sq)
sr6 = (sr2 ** 3)
sr8 = (sr6 * sr2)
sr10 = (sr8 * sr2)
v1 = ((24.0 * (1.0 - (2.0 * sr6))) * sr8)
v2 = ((96.0 * ((7.0 * sr6) - 2.0)) * sr10)
hes = ((hes + (v1 * ff)) + (v2 * (rf ** 2)))
return hes |
def test_scaler():
'Test methods of Scaler\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
scaler = Scaler(epochs.info)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
X = scaler.fit_transform(epochs_data, y)
assert_true((X.shape == epochs_data.shape))
X2 = scaler.fit(epochs_data, y).transform(epochs_data)
assert_array_equal(X2, X)
with warnings.catch_warnings(record=True):
Xi = scaler.inverse_transform(X, y)
assert_array_equal(epochs_data, Xi)
assert_raises(ValueError, scaler.fit, epochs, y)
assert_raises(ValueError, scaler.transform, epochs, y) | 5,428,728,073,411,530,000 | Test methods of Scaler | mne/decoding/tests/test_transformer.py | test_scaler | ARudiuk/mne-python | python | def test_scaler():
'\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
scaler = Scaler(epochs.info)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
X = scaler.fit_transform(epochs_data, y)
assert_true((X.shape == epochs_data.shape))
X2 = scaler.fit(epochs_data, y).transform(epochs_data)
assert_array_equal(X2, X)
with warnings.catch_warnings(record=True):
Xi = scaler.inverse_transform(X, y)
assert_array_equal(epochs_data, Xi)
assert_raises(ValueError, scaler.fit, epochs, y)
assert_raises(ValueError, scaler.transform, epochs, y) |
def test_filterestimator():
'Test methods of FilterEstimator\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=40)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
X = filt.fit_transform(epochs_data, y)
assert_true((X.shape == epochs_data.shape))
assert_array_equal(filt.fit(epochs_data, y).transform(epochs_data), X)
filt = FilterEstimator(epochs.info, l_freq=0, h_freq=40)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
X = filt.fit_transform(epochs_data, y)
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=1)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
assert_raises(ValueError, filt.fit_transform, epochs_data, y)
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=None)
with warnings.catch_warnings(record=True):
X = filt.fit_transform(epochs_data, y)
assert_raises(ValueError, filt.fit, epochs, y)
assert_raises(ValueError, filt.transform, epochs, y) | -8,625,130,022,647,989,000 | Test methods of FilterEstimator | mne/decoding/tests/test_transformer.py | test_filterestimator | ARudiuk/mne-python | python | def test_filterestimator():
'\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=40)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
X = filt.fit_transform(epochs_data, y)
assert_true((X.shape == epochs_data.shape))
assert_array_equal(filt.fit(epochs_data, y).transform(epochs_data), X)
filt = FilterEstimator(epochs.info, l_freq=0, h_freq=40)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
X = filt.fit_transform(epochs_data, y)
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=1)
y = epochs.events[:, (- 1)]
with warnings.catch_warnings(record=True):
assert_raises(ValueError, filt.fit_transform, epochs_data, y)
filt = FilterEstimator(epochs.info, l_freq=1, h_freq=None)
with warnings.catch_warnings(record=True):
X = filt.fit_transform(epochs_data, y)
assert_raises(ValueError, filt.fit, epochs, y)
assert_raises(ValueError, filt.transform, epochs, y) |
def test_psdestimator():
'Test methods of PSDEstimator\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
psd = PSDEstimator((2 * np.pi), 0, np.inf)
y = epochs.events[:, (- 1)]
X = psd.fit_transform(epochs_data, y)
assert_true((X.shape[0] == epochs_data.shape[0]))
assert_array_equal(psd.fit(epochs_data, y).transform(epochs_data), X)
assert_raises(ValueError, psd.fit, epochs, y)
assert_raises(ValueError, psd.transform, epochs, y) | -5,691,515,387,998,737,000 | Test methods of PSDEstimator | mne/decoding/tests/test_transformer.py | test_psdestimator | ARudiuk/mne-python | python | def test_psdestimator():
'\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
psd = PSDEstimator((2 * np.pi), 0, np.inf)
y = epochs.events[:, (- 1)]
X = psd.fit_transform(epochs_data, y)
assert_true((X.shape[0] == epochs_data.shape[0]))
assert_array_equal(psd.fit(epochs_data, y).transform(epochs_data), X)
assert_raises(ValueError, psd.fit, epochs, y)
assert_raises(ValueError, psd.transform, epochs, y) |
def test_epochs_vectorizer():
'Test methods of EpochsVectorizer\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
with warnings.catch_warnings(record=True):
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
vector = EpochsVectorizer(epochs.info)
y = epochs.events[:, (- 1)]
X = vector.fit_transform(epochs_data, y)
assert_true((X.shape[0] == epochs_data.shape[0]))
assert_true((X.shape[1] == (epochs_data.shape[1] * epochs_data.shape[2])))
assert_array_equal(vector.fit(epochs_data, y).transform(epochs_data), X)
n_times = epochs_data.shape[2]
assert_array_equal(epochs_data[0, 0, 0:n_times], X[0, 0:n_times])
Xi = vector.inverse_transform(X, y)
assert_true((Xi.shape[0] == epochs_data.shape[0]))
assert_true((Xi.shape[1] == epochs_data.shape[1]))
assert_array_equal(epochs_data[0, 0, 0:n_times], Xi[0, 0, 0:n_times])
Xi = vector.inverse_transform(epochs_data[0], y)
assert_true((Xi.shape[1] == epochs_data.shape[1]))
assert_true((Xi.shape[2] == epochs_data.shape[2]))
assert_raises(ValueError, vector.fit, epochs, y)
assert_raises(ValueError, vector.transform, epochs, y) | -3,315,928,520,592,674,000 | Test methods of EpochsVectorizer | mne/decoding/tests/test_transformer.py | test_epochs_vectorizer | ARudiuk/mne-python | python | def test_epochs_vectorizer():
'\n '
raw = io.read_raw_fif(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg=True, stim=False, ecg=False, eog=False, exclude='bads')
picks = picks[1:13:3]
with warnings.catch_warnings(record=True):
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks, baseline=(None, 0), preload=True)
epochs_data = epochs.get_data()
vector = EpochsVectorizer(epochs.info)
y = epochs.events[:, (- 1)]
X = vector.fit_transform(epochs_data, y)
assert_true((X.shape[0] == epochs_data.shape[0]))
assert_true((X.shape[1] == (epochs_data.shape[1] * epochs_data.shape[2])))
assert_array_equal(vector.fit(epochs_data, y).transform(epochs_data), X)
n_times = epochs_data.shape[2]
assert_array_equal(epochs_data[0, 0, 0:n_times], X[0, 0:n_times])
Xi = vector.inverse_transform(X, y)
assert_true((Xi.shape[0] == epochs_data.shape[0]))
assert_true((Xi.shape[1] == epochs_data.shape[1]))
assert_array_equal(epochs_data[0, 0, 0:n_times], Xi[0, 0, 0:n_times])
Xi = vector.inverse_transform(epochs_data[0], y)
assert_true((Xi.shape[1] == epochs_data.shape[1]))
assert_true((Xi.shape[2] == epochs_data.shape[2]))
assert_raises(ValueError, vector.fit, epochs, y)
assert_raises(ValueError, vector.transform, epochs, y) |
def export_transcripts(adapter, build='37'):
'Export all transcripts from the database\n\n Args:\n adapter(scout.adapter.MongoAdapter)\n build(str)\n\n Yields:\n transcript(scout.models.Transcript)\n '
LOG.info('Exporting all transcripts')
for tx_obj in adapter.transcripts(build=build):
(yield tx_obj) | -9,093,368,864,162,328,000 | Export all transcripts from the database
Args:
adapter(scout.adapter.MongoAdapter)
build(str)
Yields:
transcript(scout.models.Transcript) | scout/export/transcript.py | export_transcripts | Clinical-Genomics/scout | python | def export_transcripts(adapter, build='37'):
'Export all transcripts from the database\n\n Args:\n adapter(scout.adapter.MongoAdapter)\n build(str)\n\n Yields:\n transcript(scout.models.Transcript)\n '
LOG.info('Exporting all transcripts')
for tx_obj in adapter.transcripts(build=build):
(yield tx_obj) |
def fit(self, X, y):
'Fit the model to the data'
if self.normalize:
X = self._feature_scaler.fit_transform(X)
y = self._target_scaler.fit_transform(y)
X = X.to_numpy()
if self.add_intercept:
X = np.hstack((np.ones((X.shape[0], 1)), X))
y = y.to_numpy()
if (self.method == 'normal_equation'):
self._weights = np.dot(np.dot(np.linalg.inv(np.dot(X.T, X)), X.T), y)
else:
self._weights = np.zeros(X.shape[1])
self.cost_history = ([0] * self.epochs)
for i in range(self.epochs):
grad = (np.dot(X.T, (np.dot(X, self._weights) - y)) / y.shape[0])
self._weights = (self._weights - (self.lr * grad))
self.cost_history[i] = mse_score(y, np.dot(X, self._weights))
plt.scatter(range(self.epochs), self.cost_history)
plt.xlabel('epoch')
plt.ylabel('mse') | 6,111,973,649,880,488,000 | Fit the model to the data | src/models/_linear.py | fit | orsdanilo/ml-from-scratch | python | def fit(self, X, y):
if self.normalize:
X = self._feature_scaler.fit_transform(X)
y = self._target_scaler.fit_transform(y)
X = X.to_numpy()
if self.add_intercept:
X = np.hstack((np.ones((X.shape[0], 1)), X))
y = y.to_numpy()
if (self.method == 'normal_equation'):
self._weights = np.dot(np.dot(np.linalg.inv(np.dot(X.T, X)), X.T), y)
else:
self._weights = np.zeros(X.shape[1])
self.cost_history = ([0] * self.epochs)
for i in range(self.epochs):
grad = (np.dot(X.T, (np.dot(X, self._weights) - y)) / y.shape[0])
self._weights = (self._weights - (self.lr * grad))
self.cost_history[i] = mse_score(y, np.dot(X, self._weights))
plt.scatter(range(self.epochs), self.cost_history)
plt.xlabel('epoch')
plt.ylabel('mse') |
def predict(self, X):
'Use the fitted model to predict on data'
assert (self._weights is not None), 'Model needs to be fitted first. Use the fit method'
if self.normalize:
X = self._feature_scaler.transform(X)
X = X.to_numpy()
if self.add_intercept:
X = np.hstack((np.ones((X.shape[0], 1)), X))
y_pred = np.dot(X, self._weights)
if self.normalize:
y_pred = self._target_scaler.inverse_transform(y_pred)
return np.round(y_pred, 2) | 7,613,581,550,927,299,000 | Use the fitted model to predict on data | src/models/_linear.py | predict | orsdanilo/ml-from-scratch | python | def predict(self, X):
assert (self._weights is not None), 'Model needs to be fitted first. Use the fit method'
if self.normalize:
X = self._feature_scaler.transform(X)
X = X.to_numpy()
if self.add_intercept:
X = np.hstack((np.ones((X.shape[0], 1)), X))
y_pred = np.dot(X, self._weights)
if self.normalize:
y_pred = self._target_scaler.inverse_transform(y_pred)
return np.round(y_pred, 2) |
def get_weights(self):
'Get weights from the fitted model'
assert (self._weights is not None), 'Model needs to be fitted first. Use the fit method'
return self._weights | 6,695,253,196,701,115,000 | Get weights from the fitted model | src/models/_linear.py | get_weights | orsdanilo/ml-from-scratch | python | def get_weights(self):
assert (self._weights is not None), 'Model needs to be fitted first. Use the fit method'
return self._weights |
def score(self, X, y, metric='r2'):
'Score the model'
assert (metric in ['r2', 'rmse', 'mae']), "Metric not supported. Supported metrics are 'r2', 'rmse' and 'mae'"
y_pred = self.predict(X)
if (metric == 'r2'):
score = r2_score(y, y_pred)
elif (metric == 'rmse'):
score = rmse_score(y, y_pred)
elif (metric == 'mae'):
score = mae_score(y, y_pred)
return score | -1,464,843,303,557,911,800 | Score the model | src/models/_linear.py | score | orsdanilo/ml-from-scratch | python | def score(self, X, y, metric='r2'):
assert (metric in ['r2', 'rmse', 'mae']), "Metric not supported. Supported metrics are 'r2', 'rmse' and 'mae'"
y_pred = self.predict(X)
if (metric == 'r2'):
score = r2_score(y, y_pred)
elif (metric == 'rmse'):
score = rmse_score(y, y_pred)
elif (metric == 'mae'):
score = mae_score(y, y_pred)
return score |
def set_quest_cooldown(self, display_name, cooldown):
'\n Sets the quest cooldown to be the specified value.\n\n :param display_name: str - The display name of the person trying to set the cooldown\n :param cooldown: str - The raw message specifying the value to set the cooldown to\n :return:\n '
try:
self.channel_manager.set_quest_cooldown(self.owner, int(cooldown))
except (IndexError, ValueError):
self.channel_manager.bot.send_whisper(display_name, 'Invalid usage! Sample usage: !questcooldown 90') | 7,163,774,876,840,697,000 | Sets the quest cooldown to be the specified value.
:param display_name: str - The display name of the person trying to set the cooldown
:param cooldown: str - The raw message specifying the value to set the cooldown to
:return: | quest_bot/quest_channel.py | set_quest_cooldown | Xelaadryth/Xelabot | python | def set_quest_cooldown(self, display_name, cooldown):
'\n Sets the quest cooldown to be the specified value.\n\n :param display_name: str - The display name of the person trying to set the cooldown\n :param cooldown: str - The raw message specifying the value to set the cooldown to\n :return:\n '
try:
self.channel_manager.set_quest_cooldown(self.owner, int(cooldown))
except (IndexError, ValueError):
self.channel_manager.bot.send_whisper(display_name, 'Invalid usage! Sample usage: !questcooldown 90') |
def check_commands(self, display_name, msg, is_mod, is_sub):
'\n Connect to other command lists whose requirements are met.\n :param display_name: str - The display name of the command sender\n :param msg: str - The full message that the user sent that starts with "!"\n :param is_mod: bool - Whether the sender is a mod\n :param is_sub: bool - Whether the sender is a sub\n '
super().check_commands(display_name, msg, is_mod, is_sub)
self.quest_manager.commands.execute_command(display_name, msg) | -8,146,444,172,030,377,000 | Connect to other command lists whose requirements are met.
:param display_name: str - The display name of the command sender
:param msg: str - The full message that the user sent that starts with "!"
:param is_mod: bool - Whether the sender is a mod
:param is_sub: bool - Whether the sender is a sub | quest_bot/quest_channel.py | check_commands | Xelaadryth/Xelabot | python | def check_commands(self, display_name, msg, is_mod, is_sub):
'\n Connect to other command lists whose requirements are met.\n :param display_name: str - The display name of the command sender\n :param msg: str - The full message that the user sent that starts with "!"\n :param is_mod: bool - Whether the sender is a mod\n :param is_sub: bool - Whether the sender is a sub\n '
super().check_commands(display_name, msg, is_mod, is_sub)
self.quest_manager.commands.execute_command(display_name, msg) |
def list(self, scope: str, **kwargs: Any) -> AsyncIterable['_models.ComplianceResultList']:
'Security compliance results in the subscription.\n\n :param scope: Scope of the query, can be subscription\n (/subscriptions/0b06d9ea-afe6-4779-bd59-30e5c2d9d13f) or management group\n (/providers/Microsoft.Management/managementGroups/mgName).\n :type scope: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either ComplianceResultList or the result of cls(response)\n :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.security.models.ComplianceResultList]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = '2017-08-01'
accept = 'application/json'
def prepare_request(next_link=None):
header_parameters = {}
header_parameters['Accept'] = self._serialize.header('accept', accept, 'str')
if (not next_link):
url = self.list.metadata['url']
path_format_arguments = {'scope': self._serialize.url('scope', scope, 'str', skip_quote=True)}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ComplianceResultList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), AsyncList(list_of_elem))
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data) | 1,449,087,768,364,436,500 | Security compliance results in the subscription.
:param scope: Scope of the query, can be subscription
(/subscriptions/0b06d9ea-afe6-4779-bd59-30e5c2d9d13f) or management group
(/providers/Microsoft.Management/managementGroups/mgName).
:type scope: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ComplianceResultList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.security.models.ComplianceResultList]
:raises: ~azure.core.exceptions.HttpResponseError | sdk/security/azure-mgmt-security/azure/mgmt/security/aio/operations/_compliance_results_operations.py | list | AFengKK/azure-sdk-for-python | python | def list(self, scope: str, **kwargs: Any) -> AsyncIterable['_models.ComplianceResultList']:
'Security compliance results in the subscription.\n\n :param scope: Scope of the query, can be subscription\n (/subscriptions/0b06d9ea-afe6-4779-bd59-30e5c2d9d13f) or management group\n (/providers/Microsoft.Management/managementGroups/mgName).\n :type scope: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: An iterator like instance of either ComplianceResultList or the result of cls(response)\n :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.security.models.ComplianceResultList]\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = '2017-08-01'
accept = 'application/json'
def prepare_request(next_link=None):
header_parameters = {}
header_parameters['Accept'] = self._serialize.header('accept', accept, 'str')
if (not next_link):
url = self.list.metadata['url']
path_format_arguments = {'scope': self._serialize.url('scope', scope, 'str', skip_quote=True)}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ComplianceResultList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return ((deserialized.next_link or None), AsyncList(list_of_elem))
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data) |
async def get(self, resource_id: str, compliance_result_name: str, **kwargs: Any) -> '_models.ComplianceResult':
'Security Compliance Result.\n\n :param resource_id: The identifier of the resource.\n :type resource_id: str\n :param compliance_result_name: name of the desired assessment compliance result.\n :type compliance_result_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ComplianceResult, or the result of cls(response)\n :rtype: ~azure.mgmt.security.models.ComplianceResult\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = '2017-08-01'
accept = 'application/json'
url = self.get.metadata['url']
path_format_arguments = {'resourceId': self._serialize.url('resource_id', resource_id, 'str', skip_quote=True), 'complianceResultName': self._serialize.url('compliance_result_name', compliance_result_name, 'str')}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header('accept', accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ComplianceResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized | -7,370,226,887,770,046,000 | Security Compliance Result.
:param resource_id: The identifier of the resource.
:type resource_id: str
:param compliance_result_name: name of the desired assessment compliance result.
:type compliance_result_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ComplianceResult, or the result of cls(response)
:rtype: ~azure.mgmt.security.models.ComplianceResult
:raises: ~azure.core.exceptions.HttpResponseError | sdk/security/azure-mgmt-security/azure/mgmt/security/aio/operations/_compliance_results_operations.py | get | AFengKK/azure-sdk-for-python | python | async def get(self, resource_id: str, compliance_result_name: str, **kwargs: Any) -> '_models.ComplianceResult':
'Security Compliance Result.\n\n :param resource_id: The identifier of the resource.\n :type resource_id: str\n :param compliance_result_name: name of the desired assessment compliance result.\n :type compliance_result_name: str\n :keyword callable cls: A custom type or function that will be passed the direct response\n :return: ComplianceResult, or the result of cls(response)\n :rtype: ~azure.mgmt.security.models.ComplianceResult\n :raises: ~azure.core.exceptions.HttpResponseError\n '
cls = kwargs.pop('cls', None)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = '2017-08-01'
accept = 'application/json'
url = self.get.metadata['url']
path_format_arguments = {'resourceId': self._serialize.url('resource_id', resource_id, 'str', skip_quote=True), 'complianceResultName': self._serialize.url('compliance_result_name', compliance_result_name, 'str')}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query('api_version', api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header('accept', accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = (await self._client._pipeline.run(request, stream=False, **kwargs))
response = pipeline_response.http_response
if (response.status_code not in [200]):
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ComplianceResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized |
def _warmup_update_lr(optimizer, epoch, init_lr, warmup_epochs, warmup_ratio=0.0):
'\n update learning rate of optimizers\n '
lr = ((((init_lr - warmup_ratio) * epoch) / warmup_epochs) + warmup_ratio)
for param_group in optimizer.param_groups:
param_group['lr'] = lr
return lr | -4,755,421,572,880,323,000 | update learning rate of optimizers | aw_nas/final/cnn_trainer.py | _warmup_update_lr | Harald-R/aw_nas | python | def _warmup_update_lr(optimizer, epoch, init_lr, warmup_epochs, warmup_ratio=0.0):
'\n \n '
lr = ((((init_lr - warmup_ratio) * epoch) / warmup_epochs) + warmup_ratio)
for param_group in optimizer.param_groups:
param_group['lr'] = lr
return lr |
async def try_initialize(input_channel: connection.TextReader, output_channel: connection.TextWriter, server_start_options_reader: PyreServerStartOptionsReader) -> Union[(InitializationSuccess, InitializationFailure, InitializationExit)]:
'\n Read an LSP message from the input channel and try to initialize an LSP\n server. Also write to the output channel with proper response if the input\n message is a request instead of a notification.\n\n The function can return one of three possibilities:\n - If the initialization succeeds, return `InitializationSuccess`.\n - If the initialization fails, return `InitializationFailure`. There could\n be many reasons for the failure: The incoming LSP message may not be an\n initiailization request. The incoming LSP request may be malformed. Or the\n client may not complete the handshake by sending back an `initialized` request.\n - If an exit notification is received, return `InitializationExit`. The LSP\n spec allows exiting a server without a preceding initialize request.\n '
request = None
try:
request = (await lsp.read_json_rpc(input_channel))
LOG.debug(f'Received pre-initialization LSP request: {request}')
request_id = request.id
if (request_id is None):
return (InitializationExit() if (request.method == 'exit') else InitializationFailure())
if (request.method != 'initialize'):
raise lsp.ServerNotInitializedError('An initialize request is needed.')
request_parameters = request.parameters
if (request_parameters is None):
raise lsp.ServerNotInitializedError('Missing parameters for initialize request.')
initialize_parameters = lsp.InitializeParameters.from_json_rpc_parameters(request_parameters)
try:
server_start_options = read_server_start_options(server_start_options_reader, remote_logging=None)
except configuration_module.InvalidConfiguration as e:
raise lsp.ServerNotInitializedError(str(e))
result = process_initialize_request(initialize_parameters, server_start_options.ide_features)
(await lsp.write_json_rpc(output_channel, json_rpc.SuccessResponse(id=request_id, result=result.to_dict())))
initialized_notification = (await lsp.read_json_rpc(input_channel))
if (initialized_notification.method == 'shutdown'):
(await _wait_for_exit(input_channel, output_channel))
return InitializationExit()
elif (initialized_notification.method != 'initialized'):
actual_message = json.dumps(initialized_notification.json())
raise lsp.ServerNotInitializedError(('Failed to receive an `initialized` request from client. ' + f'Got {log.truncate(actual_message, 100)}'))
return InitializationSuccess(client_capabilities=initialize_parameters.capabilities, client_info=initialize_parameters.client_info, initialization_options=initialize_parameters.initialization_options)
except json_rpc.JSONRPCException as json_rpc_error:
(await lsp.write_json_rpc(output_channel, json_rpc.ErrorResponse(id=(request.id if (request is not None) else None), code=json_rpc_error.error_code(), message=str(json_rpc_error), data={'retry': False})))
return InitializationFailure(exception=json_rpc_error) | -3,685,783,171,509,941,000 | Read an LSP message from the input channel and try to initialize an LSP
server. Also write to the output channel with proper response if the input
message is a request instead of a notification.
The function can return one of three possibilities:
- If the initialization succeeds, return `InitializationSuccess`.
- If the initialization fails, return `InitializationFailure`. There could
be many reasons for the failure: The incoming LSP message may not be an
initiailization request. The incoming LSP request may be malformed. Or the
client may not complete the handshake by sending back an `initialized` request.
- If an exit notification is received, return `InitializationExit`. The LSP
spec allows exiting a server without a preceding initialize request. | client/commands/persistent.py | try_initialize | dmitryvinn/pyre-check-1 | python | async def try_initialize(input_channel: connection.TextReader, output_channel: connection.TextWriter, server_start_options_reader: PyreServerStartOptionsReader) -> Union[(InitializationSuccess, InitializationFailure, InitializationExit)]:
'\n Read an LSP message from the input channel and try to initialize an LSP\n server. Also write to the output channel with proper response if the input\n message is a request instead of a notification.\n\n The function can return one of three possibilities:\n - If the initialization succeeds, return `InitializationSuccess`.\n - If the initialization fails, return `InitializationFailure`. There could\n be many reasons for the failure: The incoming LSP message may not be an\n initiailization request. The incoming LSP request may be malformed. Or the\n client may not complete the handshake by sending back an `initialized` request.\n - If an exit notification is received, return `InitializationExit`. The LSP\n spec allows exiting a server without a preceding initialize request.\n '
request = None
try:
request = (await lsp.read_json_rpc(input_channel))
LOG.debug(f'Received pre-initialization LSP request: {request}')
request_id = request.id
if (request_id is None):
return (InitializationExit() if (request.method == 'exit') else InitializationFailure())
if (request.method != 'initialize'):
raise lsp.ServerNotInitializedError('An initialize request is needed.')
request_parameters = request.parameters
if (request_parameters is None):
raise lsp.ServerNotInitializedError('Missing parameters for initialize request.')
initialize_parameters = lsp.InitializeParameters.from_json_rpc_parameters(request_parameters)
try:
server_start_options = read_server_start_options(server_start_options_reader, remote_logging=None)
except configuration_module.InvalidConfiguration as e:
raise lsp.ServerNotInitializedError(str(e))
result = process_initialize_request(initialize_parameters, server_start_options.ide_features)
(await lsp.write_json_rpc(output_channel, json_rpc.SuccessResponse(id=request_id, result=result.to_dict())))
initialized_notification = (await lsp.read_json_rpc(input_channel))
if (initialized_notification.method == 'shutdown'):
(await _wait_for_exit(input_channel, output_channel))
return InitializationExit()
elif (initialized_notification.method != 'initialized'):
actual_message = json.dumps(initialized_notification.json())
raise lsp.ServerNotInitializedError(('Failed to receive an `initialized` request from client. ' + f'Got {log.truncate(actual_message, 100)}'))
return InitializationSuccess(client_capabilities=initialize_parameters.capabilities, client_info=initialize_parameters.client_info, initialization_options=initialize_parameters.initialization_options)
except json_rpc.JSONRPCException as json_rpc_error:
(await lsp.write_json_rpc(output_channel, json_rpc.ErrorResponse(id=(request.id if (request is not None) else None), code=json_rpc_error.error_code(), message=str(json_rpc_error), data={'retry': False})))
return InitializationFailure(exception=json_rpc_error) |
async def _wait_for_exit(input_channel: connection.TextReader, output_channel: connection.TextWriter) -> None:
'\n Wait for an LSP "exit" request from the `input_channel`. This is mostly useful\n when the LSP server has received a "shutdown" request, in which case the LSP\n specification dictates that only "exit" can be sent from the client side.\n\n If a non-exit LSP request is received, drop it and keep waiting on another\n "exit" request.\n '
while True:
async with _read_lsp_request(input_channel, output_channel) as request:
if (request.method == 'exit'):
return
else:
raise json_rpc.InvalidRequestError(f'Only exit requests are accepted after shutdown. Got {request}.') | -5,747,864,269,994,665,000 | Wait for an LSP "exit" request from the `input_channel`. This is mostly useful
when the LSP server has received a "shutdown" request, in which case the LSP
specification dictates that only "exit" can be sent from the client side.
If a non-exit LSP request is received, drop it and keep waiting on another
"exit" request. | client/commands/persistent.py | _wait_for_exit | dmitryvinn/pyre-check-1 | python | async def _wait_for_exit(input_channel: connection.TextReader, output_channel: connection.TextWriter) -> None:
'\n Wait for an LSP "exit" request from the `input_channel`. This is mostly useful\n when the LSP server has received a "shutdown" request, in which case the LSP\n specification dictates that only "exit" can be sent from the client side.\n\n If a non-exit LSP request is received, drop it and keep waiting on another\n "exit" request.\n '
while True:
async with _read_lsp_request(input_channel, output_channel) as request:
if (request.method == 'exit'):
return
else:
raise json_rpc.InvalidRequestError(f'Only exit requests are accepted after shutdown. Got {request}.') |
async def process_hover_request(self, parameters: lsp.HoverTextDocumentParameters, request_id: Union[(int, str, None)]) -> None:
'Always respond to a hover request even for non-tracked paths.\n\n Otherwise, VS Code hover will wait for Pyre until it times out, meaning\n that messages from other hover providers will be delayed.'
document_path = parameters.text_document.document_uri().to_file_path()
if (document_path is None):
raise json_rpc.InvalidRequestError(f'Document URI is not a file: {parameters.text_document.uri}')
if (document_path not in self.state.opened_documents):
response = lsp.HoverResponse.empty()
else:
self.state.query_state.queries.put_nowait(TypesQuery(document_path))
response = self.state.query_state.hover_response_for_position(Path(document_path), parameters.position)
(await lsp.write_json_rpc(self.output_channel, json_rpc.SuccessResponse(id=request_id, result=response.to_dict()))) | -5,727,239,738,493,310,000 | Always respond to a hover request even for non-tracked paths.
Otherwise, VS Code hover will wait for Pyre until it times out, meaning
that messages from other hover providers will be delayed. | client/commands/persistent.py | process_hover_request | dmitryvinn/pyre-check-1 | python | async def process_hover_request(self, parameters: lsp.HoverTextDocumentParameters, request_id: Union[(int, str, None)]) -> None:
'Always respond to a hover request even for non-tracked paths.\n\n Otherwise, VS Code hover will wait for Pyre until it times out, meaning\n that messages from other hover providers will be delayed.'
document_path = parameters.text_document.document_uri().to_file_path()
if (document_path is None):
raise json_rpc.InvalidRequestError(f'Document URI is not a file: {parameters.text_document.uri}')
if (document_path not in self.state.opened_documents):
response = lsp.HoverResponse.empty()
else:
self.state.query_state.queries.put_nowait(TypesQuery(document_path))
response = self.state.query_state.hover_response_for_position(Path(document_path), parameters.position)
(await lsp.write_json_rpc(self.output_channel, json_rpc.SuccessResponse(id=request_id, result=response.to_dict()))) |
@property
def Active(self):
'\n Returns\n -------\n - obj(uhd_restpy.multivalue.Multivalue): Activate/Deactivate Configuration\n '
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active'])) | -8,614,067,439,674,340,000 | Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Activate/Deactivate Configuration | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Active | OpenIxia/ixnetwork_restpy | python | @property
def Active(self):
'\n Returns\n -------\n - obj(uhd_restpy.multivalue.Multivalue): Activate/Deactivate Configuration\n '
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active'])) |
@property
def BroadcastRootPriority(self):
'\n Returns\n -------\n - obj(uhd_restpy.multivalue.Multivalue): Broadcast Root Priority\n '
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BroadcastRootPriority'])) | -5,547,421,448,337,901,000 | Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Broadcast Root Priority | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | BroadcastRootPriority | OpenIxia/ixnetwork_restpy | python | @property
def BroadcastRootPriority(self):
'\n Returns\n -------\n - obj(uhd_restpy.multivalue.Multivalue): Broadcast Root Priority\n '
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BroadcastRootPriority'])) |
@property
def Count(self):
'\n Returns\n -------\n - number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.\n '
return self._get_attribute(self._SDM_ATT_MAP['Count']) | 9,202,294,428,103,448,000 | Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group. | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Count | OpenIxia/ixnetwork_restpy | python | @property
def Count(self):
'\n Returns\n -------\n - number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.\n '
return self._get_attribute(self._SDM_ATT_MAP['Count']) |
@property
def DescriptiveName(self):
"\n Returns\n -------\n - str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.\n "
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName']) | 6,335,322,004,352,822,000 | Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context. | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | DescriptiveName | OpenIxia/ixnetwork_restpy | python | @property
def DescriptiveName(self):
"\n Returns\n -------\n - str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.\n "
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName']) |
@property
def Name(self):
'\n Returns\n -------\n - str: Name of NGPF element, guaranteed to be unique in Scenario\n '
return self._get_attribute(self._SDM_ATT_MAP['Name']) | -1,824,082,867,023,513,900 | Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Name | OpenIxia/ixnetwork_restpy | python | @property
def Name(self):
'\n Returns\n -------\n - str: Name of NGPF element, guaranteed to be unique in Scenario\n '
return self._get_attribute(self._SDM_ATT_MAP['Name']) |
@property
def Nickname(self):
'\n Returns\n -------\n - obj(uhd_restpy.multivalue.Multivalue): Nickname\n '
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Nickname'])) | 4,368,694,805,941,518,000 | Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Nickname | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Nickname | OpenIxia/ixnetwork_restpy | python | @property
def Nickname(self):
'\n Returns\n -------\n - obj(uhd_restpy.multivalue.Multivalue): Nickname\n '
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Nickname'])) |
def update(self, Name=None):
'Updates isisTrillPseudoNode resource on the server.\n\n This method has some named parameters with a type: obj (Multivalue).\n The Multivalue class has documentation that details the possible values for those named parameters.\n\n Args\n ----\n - Name (str): Name of NGPF element, guaranteed to be unique in Scenario\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) | -5,700,813,858,963,087,000 | Updates isisTrillPseudoNode resource on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has documentation that details the possible values for those named parameters.
Args
----
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Raises
------
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | update | OpenIxia/ixnetwork_restpy | python | def update(self, Name=None):
'Updates isisTrillPseudoNode resource on the server.\n\n This method has some named parameters with a type: obj (Multivalue).\n The Multivalue class has documentation that details the possible values for those named parameters.\n\n Args\n ----\n - Name (str): Name of NGPF element, guaranteed to be unique in Scenario\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) |
def add(self, Name=None):
'Adds a new isisTrillPseudoNode resource on the json, only valid with config assistant\n\n Args\n ----\n - Name (str): Name of NGPF element, guaranteed to be unique in Scenario\n\n Returns\n -------\n - self: This instance with all currently retrieved isisTrillPseudoNode resources using find and the newly added isisTrillPseudoNode resources available through an iterator or index\n\n Raises\n ------\n - Exception: if this function is not being used with config assistance\n '
return self._add_xpath(self._map_locals(self._SDM_ATT_MAP, locals())) | -6,872,992,947,753,101,000 | Adds a new isisTrillPseudoNode resource on the json, only valid with config assistant
Args
----
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Returns
-------
- self: This instance with all currently retrieved isisTrillPseudoNode resources using find and the newly added isisTrillPseudoNode resources available through an iterator or index
Raises
------
- Exception: if this function is not being used with config assistance | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | add | OpenIxia/ixnetwork_restpy | python | def add(self, Name=None):
'Adds a new isisTrillPseudoNode resource on the json, only valid with config assistant\n\n Args\n ----\n - Name (str): Name of NGPF element, guaranteed to be unique in Scenario\n\n Returns\n -------\n - self: This instance with all currently retrieved isisTrillPseudoNode resources using find and the newly added isisTrillPseudoNode resources available through an iterator or index\n\n Raises\n ------\n - Exception: if this function is not being used with config assistance\n '
return self._add_xpath(self._map_locals(self._SDM_ATT_MAP, locals())) |
def find(self, Count=None, DescriptiveName=None, Name=None):
"Finds and retrieves isisTrillPseudoNode resources from the server.\n\n All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve isisTrillPseudoNode resources from the server.\n To retrieve an exact match ensure the parameter value starts with ^ and ends with $\n By default the find method takes no parameters and will retrieve all isisTrillPseudoNode resources from the server.\n\n Args\n ----\n - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.\n - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.\n - Name (str): Name of NGPF element, guaranteed to be unique in Scenario\n\n Returns\n -------\n - self: This instance with matching isisTrillPseudoNode resources retrieved from the server available through an iterator or index\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n "
return self._select(self._map_locals(self._SDM_ATT_MAP, locals())) | -8,582,310,718,667,406,000 | Finds and retrieves isisTrillPseudoNode resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve isisTrillPseudoNode resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all isisTrillPseudoNode resources from the server.
Args
----
- Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
- DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
- Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Returns
-------
- self: This instance with matching isisTrillPseudoNode resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | find | OpenIxia/ixnetwork_restpy | python | def find(self, Count=None, DescriptiveName=None, Name=None):
"Finds and retrieves isisTrillPseudoNode resources from the server.\n\n All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve isisTrillPseudoNode resources from the server.\n To retrieve an exact match ensure the parameter value starts with ^ and ends with $\n By default the find method takes no parameters and will retrieve all isisTrillPseudoNode resources from the server.\n\n Args\n ----\n - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.\n - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.\n - Name (str): Name of NGPF element, guaranteed to be unique in Scenario\n\n Returns\n -------\n - self: This instance with matching isisTrillPseudoNode resources retrieved from the server available through an iterator or index\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n "
return self._select(self._map_locals(self._SDM_ATT_MAP, locals())) |
def read(self, href):
'Retrieves a single instance of isisTrillPseudoNode data from the server.\n\n Args\n ----\n - href (str): An href to the instance to be retrieved\n\n Returns\n -------\n - self: This instance with the isisTrillPseudoNode resources from the server available through an iterator or index\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
return self._read(href) | -3,411,542,355,956,305,000 | Retrieves a single instance of isisTrillPseudoNode data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the isisTrillPseudoNode resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | read | OpenIxia/ixnetwork_restpy | python | def read(self, href):
'Retrieves a single instance of isisTrillPseudoNode data from the server.\n\n Args\n ----\n - href (str): An href to the instance to be retrieved\n\n Returns\n -------\n - self: This instance with the isisTrillPseudoNode resources from the server available through an iterator or index\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
return self._read(href) |
def Abort(self, *args, **kwargs):
'Executes the abort operation on the server.\n\n Abort CPF control plane (equals to demote to kUnconfigured state).\n\n abort(async_operation=bool)\n ---------------------------\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
payload = {'Arg1': self}
for i in range(len(args)):
payload[('Arg%s' % (i + 2))] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute('abort', payload=payload, response_object=None) | -4,862,486,890,617,578,000 | Executes the abort operation on the server.
Abort CPF control plane (equals to demote to kUnconfigured state).
abort(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Abort | OpenIxia/ixnetwork_restpy | python | def Abort(self, *args, **kwargs):
'Executes the abort operation on the server.\n\n Abort CPF control plane (equals to demote to kUnconfigured state).\n\n abort(async_operation=bool)\n ---------------------------\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
payload = {'Arg1': self}
for i in range(len(args)):
payload[('Arg%s' % (i + 2))] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute('abort', payload=payload, response_object=None) |
def Start(self, *args, **kwargs):
'Executes the start operation on the server.\n\n Start CPF control plane (equals to promote to negotiated state).\n\n The IxNetwork model allows for multiple method Signatures with the same name while python does not.\n\n start(async_operation=bool)\n ---------------------------\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n start(SessionIndices=list, async_operation=bool)\n ------------------------------------------------\n - SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n start(SessionIndices=string, async_operation=bool)\n --------------------------------------------------\n - SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
payload = {'Arg1': self}
for i in range(len(args)):
payload[('Arg%s' % (i + 2))] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None) | -8,314,196,885,129,389,000 | Executes the start operation on the server.
Start CPF control plane (equals to promote to negotiated state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
start(async_operation=bool)
---------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
start(SessionIndices=list, async_operation=bool)
------------------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
start(SessionIndices=string, async_operation=bool)
--------------------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Start | OpenIxia/ixnetwork_restpy | python | def Start(self, *args, **kwargs):
'Executes the start operation on the server.\n\n Start CPF control plane (equals to promote to negotiated state).\n\n The IxNetwork model allows for multiple method Signatures with the same name while python does not.\n\n start(async_operation=bool)\n ---------------------------\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n start(SessionIndices=list, async_operation=bool)\n ------------------------------------------------\n - SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n start(SessionIndices=string, async_operation=bool)\n --------------------------------------------------\n - SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
payload = {'Arg1': self}
for i in range(len(args)):
payload[('Arg%s' % (i + 2))] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None) |
def Stop(self, *args, **kwargs):
'Executes the stop operation on the server.\n\n Stop CPF control plane (equals to demote to PreValidated-DoDDone state).\n\n The IxNetwork model allows for multiple method Signatures with the same name while python does not.\n\n stop(async_operation=bool)\n --------------------------\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n stop(SessionIndices=list, async_operation=bool)\n -----------------------------------------------\n - SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n stop(SessionIndices=string, async_operation=bool)\n -------------------------------------------------\n - SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
payload = {'Arg1': self}
for i in range(len(args)):
payload[('Arg%s' % (i + 2))] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None) | 8,743,726,022,297,206,000 | Executes the stop operation on the server.
Stop CPF control plane (equals to demote to PreValidated-DoDDone state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
stop(async_operation=bool)
--------------------------
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
stop(SessionIndices=list, async_operation=bool)
-----------------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
stop(SessionIndices=string, async_operation=bool)
-------------------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | Stop | OpenIxia/ixnetwork_restpy | python | def Stop(self, *args, **kwargs):
'Executes the stop operation on the server.\n\n Stop CPF control plane (equals to demote to PreValidated-DoDDone state).\n\n The IxNetwork model allows for multiple method Signatures with the same name while python does not.\n\n stop(async_operation=bool)\n --------------------------\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n stop(SessionIndices=list, async_operation=bool)\n -----------------------------------------------\n - SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n stop(SessionIndices=string, async_operation=bool)\n -------------------------------------------------\n - SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12\n - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.\n\n Raises\n ------\n - NotFoundError: The requested resource does not exist on the server\n - ServerError: The server has encountered an uncategorized error condition\n '
payload = {'Arg1': self}
for i in range(len(args)):
payload[('Arg%s' % (i + 2))] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None) |
def get_device_ids(self, PortNames=None, Active=None, BroadcastRootPriority=None, Nickname=None):
'Base class infrastructure that gets a list of isisTrillPseudoNode device ids encapsulated by this object.\n\n Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.\n\n Args\n ----\n - PortNames (str): optional regex of port names\n - Active (str): optional regex of active\n - BroadcastRootPriority (str): optional regex of broadcastRootPriority\n - Nickname (str): optional regex of nickname\n\n Returns\n -------\n - list(int): A list of device ids that meets the regex criteria provided in the method parameters\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
return self._get_ngpf_device_ids(locals()) | -6,627,007,136,703,285,000 | Base class infrastructure that gets a list of isisTrillPseudoNode device ids encapsulated by this object.
Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
Args
----
- PortNames (str): optional regex of port names
- Active (str): optional regex of active
- BroadcastRootPriority (str): optional regex of broadcastRootPriority
- Nickname (str): optional regex of nickname
Returns
-------
- list(int): A list of device ids that meets the regex criteria provided in the method parameters
Raises
------
- ServerError: The server has encountered an uncategorized error condition | uhd_restpy/testplatform/sessions/ixnetwork/topology/isistrillpseudonode_173e4463dccc2001457569c77f3570e0.py | get_device_ids | OpenIxia/ixnetwork_restpy | python | def get_device_ids(self, PortNames=None, Active=None, BroadcastRootPriority=None, Nickname=None):
'Base class infrastructure that gets a list of isisTrillPseudoNode device ids encapsulated by this object.\n\n Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.\n\n Args\n ----\n - PortNames (str): optional regex of port names\n - Active (str): optional regex of active\n - BroadcastRootPriority (str): optional regex of broadcastRootPriority\n - Nickname (str): optional regex of nickname\n\n Returns\n -------\n - list(int): A list of device ids that meets the regex criteria provided in the method parameters\n\n Raises\n ------\n - ServerError: The server has encountered an uncategorized error condition\n '
return self._get_ngpf_device_ids(locals()) |
def __repr__(self):
'\n display info about this object ...\n\n :return: output\n '
x = ('Libref = %s\n' % self.libref)
x += ('Table = %s\n' % self.table)
x += ('Dsopts = %s\n' % str(self.dsopts))
x += ('Results = %s\n' % self.results)
return x | 833,230,397,643,892,900 | display info about this object ...
:return: output | saspy/sasdata.py | __repr__ | kjnh10/saspy | python | def __repr__(self):
'\n display info about this object ...\n\n :return: output\n '
x = ('Libref = %s\n' % self.libref)
x += ('Table = %s\n' % self.table)
x += ('Dsopts = %s\n' % str(self.dsopts))
x += ('Results = %s\n' % self.results)
return x |
def set_results(self, results: str):
"\n This method set the results attribute for the SASdata object; it stays in effect till changed\n results - set the default result type for this SASdata object. 'Pandas' or 'HTML' or 'TEXT'.\n\n :param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives\n :return: None\n "
if (results.upper() == 'HTML'):
self.HTML = 1
else:
self.HTML = 0
self.results = results | 8,315,398,458,623,885,000 | This method set the results attribute for the SASdata object; it stays in effect till changed
results - set the default result type for this SASdata object. 'Pandas' or 'HTML' or 'TEXT'.
:param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives
:return: None | saspy/sasdata.py | set_results | kjnh10/saspy | python | def set_results(self, results: str):
"\n This method set the results attribute for the SASdata object; it stays in effect till changed\n results - set the default result type for this SASdata object. 'Pandas' or 'HTML' or 'TEXT'.\n\n :param results: format of results, SASsession.results is default, PANDAS, HTML or TEXT are the alternatives\n :return: None\n "
if (results.upper() == 'HTML'):
self.HTML = 1
else:
self.HTML = 0
self.results = results |
def _returnPD(self, code, tablename, **kwargs):
'\n private function to take a sas code normally to create a table, generate pandas data frame and cleanup.\n\n :param code: string of SAS code\n :param tablename: the name of the SAS Data Set\n :param kwargs:\n :return: Pandas Data Frame\n '
if self.sas.sascfg.pandas:
raise type(self.sas.sascfg.pandas)(self.sas.sascfg.pandas.msg)
libref = kwargs.get('libref', 'work')
ll = self.sas._io.submit(code)
(check, errorMsg) = self._checkLogForError(ll['LOG'])
if (not check):
raise ValueError(('Internal code execution failed: ' + errorMsg))
if isinstance(tablename, str):
df = self.sas.sasdata2dataframe(tablename, libref)
self.sas._io.submit(('proc delete data=%s.%s; run;' % (libref, tablename)))
elif isinstance(tablename, list):
df = dict()
for t in tablename:
if self.sas.exist(t, libref):
df[t.replace('_', '').capitalize()] = self.sas.sasdata2dataframe(t, libref)
self.sas._io.submit(('proc delete data=%s.%s; run;' % (libref, t)))
else:
raise SyntaxError(('The tablename must be a string or list %s was submitted' % str(type(tablename))))
return df | -6,078,101,975,344,420,000 | private function to take a sas code normally to create a table, generate pandas data frame and cleanup.
:param code: string of SAS code
:param tablename: the name of the SAS Data Set
:param kwargs:
:return: Pandas Data Frame | saspy/sasdata.py | _returnPD | kjnh10/saspy | python | def _returnPD(self, code, tablename, **kwargs):
'\n private function to take a sas code normally to create a table, generate pandas data frame and cleanup.\n\n :param code: string of SAS code\n :param tablename: the name of the SAS Data Set\n :param kwargs:\n :return: Pandas Data Frame\n '
if self.sas.sascfg.pandas:
raise type(self.sas.sascfg.pandas)(self.sas.sascfg.pandas.msg)
libref = kwargs.get('libref', 'work')
ll = self.sas._io.submit(code)
(check, errorMsg) = self._checkLogForError(ll['LOG'])
if (not check):
raise ValueError(('Internal code execution failed: ' + errorMsg))
if isinstance(tablename, str):
df = self.sas.sasdata2dataframe(tablename, libref)
self.sas._io.submit(('proc delete data=%s.%s; run;' % (libref, tablename)))
elif isinstance(tablename, list):
df = dict()
for t in tablename:
if self.sas.exist(t, libref):
df[t.replace('_', '').capitalize()] = self.sas.sasdata2dataframe(t, libref)
self.sas._io.submit(('proc delete data=%s.%s; run;' % (libref, t)))
else:
raise SyntaxError(('The tablename must be a string or list %s was submitted' % str(type(tablename))))
return df |
def _dsopts(self):
"\n This method builds out data set options clause for this SASdata object: '(where= , keeep=, obs=, ...)'\n "
return self.sas._dsopts(self.dsopts) | -3,960,126,987,296,020,500 | This method builds out data set options clause for this SASdata object: '(where= , keeep=, obs=, ...)' | saspy/sasdata.py | _dsopts | kjnh10/saspy | python | def _dsopts(self):
"\n \n "
return self.sas._dsopts(self.dsopts) |
def where(self, where: str) -> 'SASdata':
'\n This method returns a clone of the SASdata object, with the where attribute set. The original SASdata object is not affected.\n\n :param where: the where clause to apply\n :return: SAS data object\n '
sd = SASdata(self.sas, self.libref, self.table, dsopts=dict(self.dsopts))
sd.HTML = self.HTML
sd.dsopts['where'] = where
return sd | 191,522,911,558,066,560 | This method returns a clone of the SASdata object, with the where attribute set. The original SASdata object is not affected.
:param where: the where clause to apply
:return: SAS data object | saspy/sasdata.py | where | kjnh10/saspy | python | def where(self, where: str) -> 'SASdata':
'\n This method returns a clone of the SASdata object, with the where attribute set. The original SASdata object is not affected.\n\n :param where: the where clause to apply\n :return: SAS data object\n '
sd = SASdata(self.sas, self.libref, self.table, dsopts=dict(self.dsopts))
sd.HTML = self.HTML
sd.dsopts['where'] = where
return sd |
def head(self, obs=5):
'\n display the first n rows of a table\n\n :param obs: the number of rows of the table that you want to display. The default is 5\n :return:\n '
topts = dict(self.dsopts)
topts['obs'] = obs
code = ((((('proc print data=' + self.libref) + '.') + self.table) + self.sas._dsopts(topts)) + ';run;')
if self.sas.nosub:
print(code)
return
if (self.results.upper() == 'PANDAS'):
code = ('data _head ; set %s.%s %s; run;' % (self.libref, self.table, self.sas._dsopts(topts)))
return self._returnPD(code, '_head')
else:
ll = self._is_valid()
if self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll | 3,445,847,193,978,387,500 | display the first n rows of a table
:param obs: the number of rows of the table that you want to display. The default is 5
:return: | saspy/sasdata.py | head | kjnh10/saspy | python | def head(self, obs=5):
'\n display the first n rows of a table\n\n :param obs: the number of rows of the table that you want to display. The default is 5\n :return:\n '
topts = dict(self.dsopts)
topts['obs'] = obs
code = ((((('proc print data=' + self.libref) + '.') + self.table) + self.sas._dsopts(topts)) + ';run;')
if self.sas.nosub:
print(code)
return
if (self.results.upper() == 'PANDAS'):
code = ('data _head ; set %s.%s %s; run;' % (self.libref, self.table, self.sas._dsopts(topts)))
return self._returnPD(code, '_head')
else:
ll = self._is_valid()
if self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll |
def tail(self, obs=5):
'\n display the last n rows of a table\n\n :param obs: the number of rows of the table that you want to display. The default is 5\n :return:\n '
code = 'proc sql;select count(*) format best32. into :lastobs from '
code += (((self.libref + '.') + self.table) + self._dsopts())
code += ';%put lastobs=&lastobs lastobsend=;\nquit;'
nosub = self.sas.nosub
self.sas.nosub = False
le = self._is_valid()
if (not le):
ll = self.sas.submit(code, 'text')
lastobs = ll['LOG'].rpartition('lastobs=')
lastobs = lastobs[2].partition(' lastobsend=')
lastobs = int(lastobs[0])
else:
lastobs = obs
firstobs = (lastobs - (obs - 1))
if (firstobs < 1):
firstobs = 1
topts = dict(self.dsopts)
topts['obs'] = lastobs
topts['firstobs'] = firstobs
code = (('proc print data=' + self.libref) + '.')
code += ((self.table + self.sas._dsopts(topts)) + ';run;')
self.sas.nosub = nosub
if self.sas.nosub:
print(code)
return
if (self.results.upper() == 'PANDAS'):
code = ('data _tail ; set %s.%s %s; run;' % (self.libref, self.table, self.sas._dsopts(topts)))
return self._returnPD(code, '_tail')
elif self.HTML:
if (not le):
ll = self.sas._io.submit(code)
else:
ll = le
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not le):
ll = self.sas._io.submit(code, 'text')
else:
ll = le
if (not self.sas.batch):
print(ll['LST'])
else:
return ll | -8,550,472,158,035,630,000 | display the last n rows of a table
:param obs: the number of rows of the table that you want to display. The default is 5
:return: | saspy/sasdata.py | tail | kjnh10/saspy | python | def tail(self, obs=5):
'\n display the last n rows of a table\n\n :param obs: the number of rows of the table that you want to display. The default is 5\n :return:\n '
code = 'proc sql;select count(*) format best32. into :lastobs from '
code += (((self.libref + '.') + self.table) + self._dsopts())
code += ';%put lastobs=&lastobs lastobsend=;\nquit;'
nosub = self.sas.nosub
self.sas.nosub = False
le = self._is_valid()
if (not le):
ll = self.sas.submit(code, 'text')
lastobs = ll['LOG'].rpartition('lastobs=')
lastobs = lastobs[2].partition(' lastobsend=')
lastobs = int(lastobs[0])
else:
lastobs = obs
firstobs = (lastobs - (obs - 1))
if (firstobs < 1):
firstobs = 1
topts = dict(self.dsopts)
topts['obs'] = lastobs
topts['firstobs'] = firstobs
code = (('proc print data=' + self.libref) + '.')
code += ((self.table + self.sas._dsopts(topts)) + ';run;')
self.sas.nosub = nosub
if self.sas.nosub:
print(code)
return
if (self.results.upper() == 'PANDAS'):
code = ('data _tail ; set %s.%s %s; run;' % (self.libref, self.table, self.sas._dsopts(topts)))
return self._returnPD(code, '_tail')
elif self.HTML:
if (not le):
ll = self.sas._io.submit(code)
else:
ll = le
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not le):
ll = self.sas._io.submit(code, 'text')
else:
ll = le
if (not self.sas.batch):
print(ll['LST'])
else:
return ll |
def obs(self):
'\n return the number of observations for your SASdata object\n '
code = 'proc sql;select count(*) format best32. into :lastobs from '
code += (((self.libref + '.') + self.table) + self._dsopts())
code += ';%put lastobs=&lastobs lastobsend=;\nquit;'
if self.sas.nosub:
print(code)
return
le = self._is_valid()
if (not le):
ll = self.sas.submit(code, 'text')
lastobs = ll['LOG'].rpartition('lastobs=')
lastobs = lastobs[2].partition(' lastobsend=')
lastobs = int(lastobs[0])
else:
print("The SASdata object is not valid. The table doesn't exist in this SAS session at this time.")
lastobs = None
return lastobs | 7,016,958,279,017,528,000 | return the number of observations for your SASdata object | saspy/sasdata.py | obs | kjnh10/saspy | python | def obs(self):
'\n \n '
code = 'proc sql;select count(*) format best32. into :lastobs from '
code += (((self.libref + '.') + self.table) + self._dsopts())
code += ';%put lastobs=&lastobs lastobsend=;\nquit;'
if self.sas.nosub:
print(code)
return
le = self._is_valid()
if (not le):
ll = self.sas.submit(code, 'text')
lastobs = ll['LOG'].rpartition('lastobs=')
lastobs = lastobs[2].partition(' lastobsend=')
lastobs = int(lastobs[0])
else:
print("The SASdata object is not valid. The table doesn't exist in this SAS session at this time.")
lastobs = None
return lastobs |
def partition(self, var: str='', fraction: float=0.7, seed: int=9878, kfold: int=1, out: 'SASdata'=None, singleOut: bool=True) -> object:
'\n Partition a sas data object using SRS sampling or if a variable is specified then\n stratifying with respect to that variable\n\n :param var: variable(s) for stratification. If multiple then space delimited list\n :param fraction: fraction to split\n :param seed: random seed\n :param kfold: number of k folds\n :param out: the SAS data object\n :param singleOut: boolean to return single table or seperate tables\n :return: Tuples or SAS data object\n '
i = 1
code = ''
try:
k = int(kfold)
except ValueError:
print('Kfold must be an integer')
if (out is None):
out_table = self.table
out_libref = self.libref
elif (not isinstance(out, str)):
out_table = out.table
out_libref = out.libref
else:
try:
out_table = out.split('.')[1]
out_libref = out.split('.')[0]
except IndexError:
out_table = out
out_libref = 'work'
while (i <= k):
if (k == 1):
code += ('proc hpsample data=%s.%s %s out=%s.%s %s samppct=%s seed=%s Partition;\n' % (self.libref, self.table, self._dsopts(), out_libref, out_table, self._dsopts(), (fraction * 100), seed))
else:
seed += 1
code += ('proc hpsample data=%s.%s %s out=%s.%s %s samppct=%s seed=%s partition PARTINDNAME=_cvfold%s;\n' % (self.libref, self.table, self._dsopts(), out_libref, out_table, self._dsopts(), (fraction * 100), seed, i))
if (len(var) > 0):
if (i == 1):
num_string = "\n data _null_; file LOG;\n d = open('{0}.{1}');\n nvars = attrn(d, 'NVARS'); \n put 'VARLIST=';\n do i = 1 to nvars; \n vart = vartype(d, i);\n var = varname(d, i);\n if vart eq 'N' then\n put %upcase('var=') var %upcase('varEND=');\n end;\n put 'VARLISTEND=';\n run;\n "
nosub = self.sas.nosub
self.sas.nosub = False
ll = self.sas.submit(num_string.format(self.libref, (self.table + self._dsopts())))
self.sas.nosub = nosub
numlist = []
log = ll['LOG'].rpartition('VARLISTEND=')[0].rpartition('VARLIST=')
for vari in range(log[2].count('VAR=')):
log = log[2].partition('VAR=')[2].partition(' VAREND=')
numlist.append(log[0].strip())
if isinstance(var, str):
tlist = var.split()
elif isinstance(var, list):
tlist = var
else:
raise SyntaxError(('var must be a string or list you submitted: %s' % str(type(var))))
if set(numlist).isdisjoint(tlist):
if isinstance(var, str):
code += ('class _character_;\ntarget %s;\nvar _numeric_;\n' % var)
else:
code += ('class _character_;\ntarget %s;\nvar _numeric_;\n' % ' '.join(var))
else:
varlist = [x for x in numlist if (x not in tlist)]
varlist.extend([('_cvfold%s' % j) for j in range(1, i) if ((k > 1) and (i > 1))])
code += ('class %s _character_;\ntarget %s;\nvar %s;\n' % (var, var, ' '.join(varlist)))
else:
code += 'class _character_;\nvar _numeric_;\n'
code += 'run;\n'
i += 1
split_code = ''
if (not singleOut):
split_code += 'DATA '
for j in range(1, (k + 1)):
split_code += ('\t%s.%s%s_train(drop=_Partind_ _cvfold:)\n' % (out_libref, out_table, j))
split_code += ('\t%s.%s%s_score(drop=_Partind_ _cvfold:)\n' % (out_libref, out_table, j))
split_code += (';\n \tset %s.%s;\n' % (out_libref, out_table))
for z in range(1, (k + 1)):
split_code += ('\tif _cvfold%s = 1 or _partind_ = 1 then output %s.%s%s_train;\n' % (z, out_libref, out_table, z))
split_code += ('\telse output %s.%s%s_score;\n' % (out_libref, out_table, z))
split_code += 'run;'
runcode = True
if self.sas.nosub:
print(((code + '\n\n') + split_code))
runcode = False
ll = self._is_valid()
if ll:
runcode = False
if runcode:
ll = self.sas.submit((code + split_code), 'text')
elog = []
for line in ll['LOG'].splitlines():
if line[self.sas.logoffset:].startswith('ERROR'):
elog.append(line)
if len(elog):
raise RuntimeError('\n'.join(elog))
if (not singleOut):
outTableList = []
if (k == 1):
return (self.sas.sasdata(((out_table + str(k)) + '_train'), out_libref, dsopts=self._dsopts()), self.sas.sasdata(((out_table + str(k)) + '_score'), out_libref, dsopts=self._dsopts()))
for j in range(1, (k + 1)):
outTableList.append((self.sas.sasdata(((out_table + str(j)) + '_train'), out_libref, dsopts=self._dsopts()), self.sas.sasdata(((out_table + str(j)) + '_score'), out_libref, dsopts=self._dsopts())))
return outTableList
if out:
if (not isinstance(out, str)):
return out
else:
return self.sas.sasdata(out_table, out_libref, self.results)
else:
return self | -2,989,308,485,039,091,700 | Partition a sas data object using SRS sampling or if a variable is specified then
stratifying with respect to that variable
:param var: variable(s) for stratification. If multiple then space delimited list
:param fraction: fraction to split
:param seed: random seed
:param kfold: number of k folds
:param out: the SAS data object
:param singleOut: boolean to return single table or seperate tables
:return: Tuples or SAS data object | saspy/sasdata.py | partition | kjnh10/saspy | python | def partition(self, var: str=, fraction: float=0.7, seed: int=9878, kfold: int=1, out: 'SASdata'=None, singleOut: bool=True) -> object:
'\n Partition a sas data object using SRS sampling or if a variable is specified then\n stratifying with respect to that variable\n\n :param var: variable(s) for stratification. If multiple then space delimited list\n :param fraction: fraction to split\n :param seed: random seed\n :param kfold: number of k folds\n :param out: the SAS data object\n :param singleOut: boolean to return single table or seperate tables\n :return: Tuples or SAS data object\n '
i = 1
code =
try:
k = int(kfold)
except ValueError:
print('Kfold must be an integer')
if (out is None):
out_table = self.table
out_libref = self.libref
elif (not isinstance(out, str)):
out_table = out.table
out_libref = out.libref
else:
try:
out_table = out.split('.')[1]
out_libref = out.split('.')[0]
except IndexError:
out_table = out
out_libref = 'work'
while (i <= k):
if (k == 1):
code += ('proc hpsample data=%s.%s %s out=%s.%s %s samppct=%s seed=%s Partition;\n' % (self.libref, self.table, self._dsopts(), out_libref, out_table, self._dsopts(), (fraction * 100), seed))
else:
seed += 1
code += ('proc hpsample data=%s.%s %s out=%s.%s %s samppct=%s seed=%s partition PARTINDNAME=_cvfold%s;\n' % (self.libref, self.table, self._dsopts(), out_libref, out_table, self._dsopts(), (fraction * 100), seed, i))
if (len(var) > 0):
if (i == 1):
num_string = "\n data _null_; file LOG;\n d = open('{0}.{1}');\n nvars = attrn(d, 'NVARS'); \n put 'VARLIST=';\n do i = 1 to nvars; \n vart = vartype(d, i);\n var = varname(d, i);\n if vart eq 'N' then\n put %upcase('var=') var %upcase('varEND=');\n end;\n put 'VARLISTEND=';\n run;\n "
nosub = self.sas.nosub
self.sas.nosub = False
ll = self.sas.submit(num_string.format(self.libref, (self.table + self._dsopts())))
self.sas.nosub = nosub
numlist = []
log = ll['LOG'].rpartition('VARLISTEND=')[0].rpartition('VARLIST=')
for vari in range(log[2].count('VAR=')):
log = log[2].partition('VAR=')[2].partition(' VAREND=')
numlist.append(log[0].strip())
if isinstance(var, str):
tlist = var.split()
elif isinstance(var, list):
tlist = var
else:
raise SyntaxError(('var must be a string or list you submitted: %s' % str(type(var))))
if set(numlist).isdisjoint(tlist):
if isinstance(var, str):
code += ('class _character_;\ntarget %s;\nvar _numeric_;\n' % var)
else:
code += ('class _character_;\ntarget %s;\nvar _numeric_;\n' % ' '.join(var))
else:
varlist = [x for x in numlist if (x not in tlist)]
varlist.extend([('_cvfold%s' % j) for j in range(1, i) if ((k > 1) and (i > 1))])
code += ('class %s _character_;\ntarget %s;\nvar %s;\n' % (var, var, ' '.join(varlist)))
else:
code += 'class _character_;\nvar _numeric_;\n'
code += 'run;\n'
i += 1
split_code =
if (not singleOut):
split_code += 'DATA '
for j in range(1, (k + 1)):
split_code += ('\t%s.%s%s_train(drop=_Partind_ _cvfold:)\n' % (out_libref, out_table, j))
split_code += ('\t%s.%s%s_score(drop=_Partind_ _cvfold:)\n' % (out_libref, out_table, j))
split_code += (';\n \tset %s.%s;\n' % (out_libref, out_table))
for z in range(1, (k + 1)):
split_code += ('\tif _cvfold%s = 1 or _partind_ = 1 then output %s.%s%s_train;\n' % (z, out_libref, out_table, z))
split_code += ('\telse output %s.%s%s_score;\n' % (out_libref, out_table, z))
split_code += 'run;'
runcode = True
if self.sas.nosub:
print(((code + '\n\n') + split_code))
runcode = False
ll = self._is_valid()
if ll:
runcode = False
if runcode:
ll = self.sas.submit((code + split_code), 'text')
elog = []
for line in ll['LOG'].splitlines():
if line[self.sas.logoffset:].startswith('ERROR'):
elog.append(line)
if len(elog):
raise RuntimeError('\n'.join(elog))
if (not singleOut):
outTableList = []
if (k == 1):
return (self.sas.sasdata(((out_table + str(k)) + '_train'), out_libref, dsopts=self._dsopts()), self.sas.sasdata(((out_table + str(k)) + '_score'), out_libref, dsopts=self._dsopts()))
for j in range(1, (k + 1)):
outTableList.append((self.sas.sasdata(((out_table + str(j)) + '_train'), out_libref, dsopts=self._dsopts()), self.sas.sasdata(((out_table + str(j)) + '_score'), out_libref, dsopts=self._dsopts())))
return outTableList
if out:
if (not isinstance(out, str)):
return out
else:
return self.sas.sasdata(out_table, out_libref, self.results)
else:
return self |
def contents(self):
'\n display metadata about the table. size, number of rows, columns and their data type ...\n\n :return: output\n '
code = ((((('proc contents data=' + self.libref) + '.') + self.table) + self._dsopts()) + ';run;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (self.results.upper() == 'PANDAS'):
code = ('proc contents data=%s.%s %s ;' % (self.libref, self.table, self._dsopts()))
code += 'ods output Attributes=work._attributes;'
code += 'ods output EngineHost=work._EngineHost;'
code += 'ods output Variables=work._Variables;'
code += 'ods output Sortedby=work._Sortedby;'
code += 'run;'
return self._returnPD(code, ['_attributes', '_EngineHost', '_Variables', '_Sortedby'])
elif self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll | -5,023,457,607,378,223,000 | display metadata about the table. size, number of rows, columns and their data type ...
:return: output | saspy/sasdata.py | contents | kjnh10/saspy | python | def contents(self):
'\n display metadata about the table. size, number of rows, columns and their data type ...\n\n :return: output\n '
code = ((((('proc contents data=' + self.libref) + '.') + self.table) + self._dsopts()) + ';run;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (self.results.upper() == 'PANDAS'):
code = ('proc contents data=%s.%s %s ;' % (self.libref, self.table, self._dsopts()))
code += 'ods output Attributes=work._attributes;'
code += 'ods output EngineHost=work._EngineHost;'
code += 'ods output Variables=work._Variables;'
code += 'ods output Sortedby=work._Sortedby;'
code += 'run;'
return self._returnPD(code, ['_attributes', '_EngineHost', '_Variables', '_Sortedby'])
elif self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll |
def columnInfo(self):
'\n display metadata about the table, size, number of rows, columns and their data type\n '
code = (((((('proc contents data=' + self.libref) + '.') + self.table) + ' ') + self._dsopts()) + ';ods select Variables;run;')
if self.sas.nosub:
print(code)
return
if (self.results.upper() == 'PANDAS'):
code = ('proc contents data=%s.%s %s ;ods output Variables=work._variables ;run;' % (self.libref, self.table, self._dsopts()))
df = self._returnPD(code, '_variables')
df['Type'] = df['Type'].str.rstrip()
return df
else:
ll = self._is_valid()
if self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll | 644,311,950,941,764,900 | display metadata about the table, size, number of rows, columns and their data type | saspy/sasdata.py | columnInfo | kjnh10/saspy | python | def columnInfo(self):
'\n \n '
code = (((((('proc contents data=' + self.libref) + '.') + self.table) + ' ') + self._dsopts()) + ';ods select Variables;run;')
if self.sas.nosub:
print(code)
return
if (self.results.upper() == 'PANDAS'):
code = ('proc contents data=%s.%s %s ;ods output Variables=work._variables ;run;' % (self.libref, self.table, self._dsopts()))
df = self._returnPD(code, '_variables')
df['Type'] = df['Type'].str.rstrip()
return df
else:
ll = self._is_valid()
if self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll |
def info(self):
'\n Display the column info on a SAS data object\n\n :return: Pandas data frame\n '
if (self.results.casefold() != 'pandas'):
print('The info method only works with Pandas results')
return None
info_code = "\n data work._statsInfo ;\n do rows=0 by 1 while( not last ) ;\n set {0}.{1}{2} end=last;\n array chrs _character_ ;\n array nums _numeric_ ;\n array ccounts(999) _temporary_ ;\n array ncounts(999) _temporary_ ;\n do over chrs;\n ccounts(_i_) + missing(chrs) ;\n end;\n do over nums;\n ncounts(_i_) + missing(nums);\n end; \n end ;\n length Variable $32 type $8. ;\n Do over chrs;\n Type = 'char';\n Variable = vname(chrs) ;\n N = rows;\n Nmiss = ccounts(_i_) ;\n Output ;\n end ;\n Do over nums;\n Type = 'numeric';\n Variable = vname(nums) ;\n N = rows;\n Nmiss = ncounts(_i_) ;\n if variable ^= 'rows' then output;\n end ;\n stop;\n keep Variable N NMISS Type ;\n run;\n "
if self.sas.nosub:
print(info_code.format(self.libref, self.table, self._dsopts()))
return None
df = self._returnPD(info_code.format(self.libref, self.table, self._dsopts()), '_statsInfo')
df = df.iloc[:, :]
df.index.name = None
df.name = None
return df | 6,330,648,730,209,849,000 | Display the column info on a SAS data object
:return: Pandas data frame | saspy/sasdata.py | info | kjnh10/saspy | python | def info(self):
'\n Display the column info on a SAS data object\n\n :return: Pandas data frame\n '
if (self.results.casefold() != 'pandas'):
print('The info method only works with Pandas results')
return None
info_code = "\n data work._statsInfo ;\n do rows=0 by 1 while( not last ) ;\n set {0}.{1}{2} end=last;\n array chrs _character_ ;\n array nums _numeric_ ;\n array ccounts(999) _temporary_ ;\n array ncounts(999) _temporary_ ;\n do over chrs;\n ccounts(_i_) + missing(chrs) ;\n end;\n do over nums;\n ncounts(_i_) + missing(nums);\n end; \n end ;\n length Variable $32 type $8. ;\n Do over chrs;\n Type = 'char';\n Variable = vname(chrs) ;\n N = rows;\n Nmiss = ccounts(_i_) ;\n Output ;\n end ;\n Do over nums;\n Type = 'numeric';\n Variable = vname(nums) ;\n N = rows;\n Nmiss = ncounts(_i_) ;\n if variable ^= 'rows' then output;\n end ;\n stop;\n keep Variable N NMISS Type ;\n run;\n "
if self.sas.nosub:
print(info_code.format(self.libref, self.table, self._dsopts()))
return None
df = self._returnPD(info_code.format(self.libref, self.table, self._dsopts()), '_statsInfo')
df = df.iloc[:, :]
df.index.name = None
df.name = None
return df |
def describe(self):
'\n display descriptive statistics for the table; summary statistics.\n\n :return:\n '
return self.means() | -8,534,865,489,948,653,000 | display descriptive statistics for the table; summary statistics.
:return: | saspy/sasdata.py | describe | kjnh10/saspy | python | def describe(self):
'\n display descriptive statistics for the table; summary statistics.\n\n :return:\n '
return self.means() |
def means(self):
"\n display descriptive statistics for the table; summary statistics. This is an alias for 'describe'\n\n :return:\n "
dsopts = self._dsopts().partition(';\n\tformat')
code = ((((('proc means data=' + self.libref) + '.') + self.table) + dsopts[0]) + ' stackodsoutput n nmiss median mean std min p25 p50 p75 max;')
code += ((dsopts[1] + dsopts[2]) + 'run;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (self.results.upper() == 'PANDAS'):
code = ('proc means data=%s.%s %s stackodsoutput n nmiss median mean std min p25 p50 p75 max; %s ods output Summary=work._summary; run;' % (self.libref, self.table, dsopts[0], (dsopts[1] + dsopts[2])))
return self._returnPD(code, '_summary')
elif self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll | 5,236,874,011,568,663,000 | display descriptive statistics for the table; summary statistics. This is an alias for 'describe'
:return: | saspy/sasdata.py | means | kjnh10/saspy | python | def means(self):
"\n display descriptive statistics for the table; summary statistics. This is an alias for 'describe'\n\n :return:\n "
dsopts = self._dsopts().partition(';\n\tformat')
code = ((((('proc means data=' + self.libref) + '.') + self.table) + dsopts[0]) + ' stackodsoutput n nmiss median mean std min p25 p50 p75 max;')
code += ((dsopts[1] + dsopts[2]) + 'run;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (self.results.upper() == 'PANDAS'):
code = ('proc means data=%s.%s %s stackodsoutput n nmiss median mean std min p25 p50 p75 max; %s ods output Summary=work._summary; run;' % (self.libref, self.table, dsopts[0], (dsopts[1] + dsopts[2])))
return self._returnPD(code, '_summary')
elif self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll |
def impute(self, vars: dict, replace: bool=False, prefix: str='imp_', out: 'SASdata'=None) -> 'SASdata':
"\n Imputes missing values for a SASdata object.\n\n :param vars: a dictionary in the form of {'varname':'impute type'} or {'impute type':'[var1, var2]'}\n :param replace:\n :param prefix:\n :param out:\n :return:\n "
outstr = ''
if out:
if isinstance(out, str):
fn = out.partition('.')
if (fn[1] == '.'):
out_libref = fn[0]
out_table = fn[2]
else:
out_libref = ''
out_table = fn[0]
else:
out_libref = out.libref
out_table = out.table
outstr = ('out=%s.%s' % (out_libref, out_table))
else:
out_table = self.table
out_libref = self.libref
varcode = (((("data _null_; d = open('" + self.libref) + '.') + self.table) + "');\n")
varcode += "nvars = attrn(d, 'NVARS');\n"
varcode += "put 'VARNUMS=' nvars 'VARNUMS_END=';\n"
varcode += "put 'VARLIST=';\n"
varcode += "do i = 1 to nvars; var = varname(d, i); put %upcase('var=') var %upcase('varEND='); end;\n"
varcode += "put 'TYPELIST=';\n"
varcode += "do i = 1 to nvars; var = vartype(d, i); put %upcase('type=') var %upcase('typeEND='); end;\n"
varcode += "put 'END_ALL_VARS_AND_TYPES=';\n"
varcode += 'run;'
ll = self.sas._io.submit(varcode, 'text')
l2 = ll['LOG'].rpartition('VARNUMS=')[2].partition('VARNUMS_END=')
nvars = int(float(l2[0].strip()))
varlist = []
log = ll['LOG'].rpartition('TYPELIST=')[0].rpartition('VARLIST=')
for vari in range(log[2].count('VAR=')):
log = log[2].partition('VAR=')[2].partition('VAREND=')
varlist.append(log[0].strip().upper())
typelist = []
log = ll['LOG'].rpartition('END_ALL_VARS_AND_TYPES=')[0].rpartition('TYPELIST=')
for typei in range(log[2].count('VAR=')):
log = log[2].partition('TYPE=')[2].partition('TYPEEND=')
typelist.append(log[0].strip().upper())
varListType = dict(zip(varlist, typelist))
sql = 'proc sql;\n select\n'
sqlsel = ' %s(%s),\n'
sqlinto = ' into\n'
if (len(out_libref) > 0):
ds1 = ((((((((('data ' + out_libref) + '.') + out_table) + '; set ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
else:
ds1 = ((((((('data ' + out_table) + '; set ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
dsmiss = 'if missing({0}) then {1} = {2};\n'
if replace:
dsmiss = (prefix + ('{1} = {0}; if missing({0}) then %s{1} = {2};\n' % prefix))
modesql = ''
modeq = 'proc sql outobs=1;\n select %s, count(*) as freq into :imp_mode_%s, :imp_mode_freq\n'
modeq += ' from %s where %s is not null group by %s order by freq desc, %s;\nquit;\n'
contantValues = vars.pop('value', None)
if (contantValues is not None):
if (not all((isinstance(x, tuple) for x in contantValues))):
raise SyntaxError("The elements in the 'value' key must be tuples")
for t in contantValues:
if (varListType.get(t[0].upper()) == 'N'):
ds1 += dsmiss.format((t[0], t[0], t[1]))
else:
ds1 += dsmiss.format(t[0], t[0], (('"' + str(t[1])) + '"'))
for (key, values) in vars.items():
if (key.lower() in ['midrange', 'random']):
for v in values:
sql += (sqlsel % ('max', v))
sql += (sqlsel % ('min', v))
sqlinto += ((' :imp_max_' + v) + ',\n')
sqlinto += ((' :imp_min_' + v) + ',\n')
if (key.lower() == 'midrange'):
ds1 += dsmiss.format(v, v, ((((((('(&imp_min_' + v) + '.') + ' + ') + '&imp_max_') + v) + '.') + ') / 2'))
elif (key.lower() == 'random'):
ds1 += dsmiss.format(v, v, (((((((((('(&imp_max_' + v) + '.') + ' - ') + '&imp_min_') + v) + '.') + ') * ranuni(0)') + '+ &imp_min_') + v) + '.'))
else:
raise SyntaxError('This should not happen!!!!')
else:
for v in values:
sql += (sqlsel % (key, v))
sqlinto += ((' :imp_' + v) + ',\n')
if (key.lower == 'mode'):
modesql += (modeq % (v, v, (((self.libref + '.') + self.table) + self._dsopts()), v, v, v))
if (varListType.get(v.upper()) == 'N'):
ds1 += dsmiss.format(v, v, (('&imp_' + v) + '.'))
else:
ds1 += dsmiss.format(v, v, (('"&imp_' + v) + '."'))
if (len(sql) > 20):
sql = ((((((((sql.rstrip(', \n') + '\n') + sqlinto.rstrip(', \n')) + '\n from ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\nquit;\n')
else:
sql = ''
ds1 += 'run;\n'
if self.sas.nosub:
print(((modesql + sql) + ds1))
return None
ll = self.sas.submit(((modesql + sql) + ds1))
return self.sas.sasdata(out_table, libref=out_libref, results=self.results, dsopts=self._dsopts()) | -6,392,868,252,885,690,000 | Imputes missing values for a SASdata object.
:param vars: a dictionary in the form of {'varname':'impute type'} or {'impute type':'[var1, var2]'}
:param replace:
:param prefix:
:param out:
:return: | saspy/sasdata.py | impute | kjnh10/saspy | python | def impute(self, vars: dict, replace: bool=False, prefix: str='imp_', out: 'SASdata'=None) -> 'SASdata':
"\n Imputes missing values for a SASdata object.\n\n :param vars: a dictionary in the form of {'varname':'impute type'} or {'impute type':'[var1, var2]'}\n :param replace:\n :param prefix:\n :param out:\n :return:\n "
outstr =
if out:
if isinstance(out, str):
fn = out.partition('.')
if (fn[1] == '.'):
out_libref = fn[0]
out_table = fn[2]
else:
out_libref =
out_table = fn[0]
else:
out_libref = out.libref
out_table = out.table
outstr = ('out=%s.%s' % (out_libref, out_table))
else:
out_table = self.table
out_libref = self.libref
varcode = (((("data _null_; d = open('" + self.libref) + '.') + self.table) + "');\n")
varcode += "nvars = attrn(d, 'NVARS');\n"
varcode += "put 'VARNUMS=' nvars 'VARNUMS_END=';\n"
varcode += "put 'VARLIST=';\n"
varcode += "do i = 1 to nvars; var = varname(d, i); put %upcase('var=') var %upcase('varEND='); end;\n"
varcode += "put 'TYPELIST=';\n"
varcode += "do i = 1 to nvars; var = vartype(d, i); put %upcase('type=') var %upcase('typeEND='); end;\n"
varcode += "put 'END_ALL_VARS_AND_TYPES=';\n"
varcode += 'run;'
ll = self.sas._io.submit(varcode, 'text')
l2 = ll['LOG'].rpartition('VARNUMS=')[2].partition('VARNUMS_END=')
nvars = int(float(l2[0].strip()))
varlist = []
log = ll['LOG'].rpartition('TYPELIST=')[0].rpartition('VARLIST=')
for vari in range(log[2].count('VAR=')):
log = log[2].partition('VAR=')[2].partition('VAREND=')
varlist.append(log[0].strip().upper())
typelist = []
log = ll['LOG'].rpartition('END_ALL_VARS_AND_TYPES=')[0].rpartition('TYPELIST=')
for typei in range(log[2].count('VAR=')):
log = log[2].partition('TYPE=')[2].partition('TYPEEND=')
typelist.append(log[0].strip().upper())
varListType = dict(zip(varlist, typelist))
sql = 'proc sql;\n select\n'
sqlsel = ' %s(%s),\n'
sqlinto = ' into\n'
if (len(out_libref) > 0):
ds1 = ((((((((('data ' + out_libref) + '.') + out_table) + '; set ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
else:
ds1 = ((((((('data ' + out_table) + '; set ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
dsmiss = 'if missing({0}) then {1} = {2};\n'
if replace:
dsmiss = (prefix + ('{1} = {0}; if missing({0}) then %s{1} = {2};\n' % prefix))
modesql =
modeq = 'proc sql outobs=1;\n select %s, count(*) as freq into :imp_mode_%s, :imp_mode_freq\n'
modeq += ' from %s where %s is not null group by %s order by freq desc, %s;\nquit;\n'
contantValues = vars.pop('value', None)
if (contantValues is not None):
if (not all((isinstance(x, tuple) for x in contantValues))):
raise SyntaxError("The elements in the 'value' key must be tuples")
for t in contantValues:
if (varListType.get(t[0].upper()) == 'N'):
ds1 += dsmiss.format((t[0], t[0], t[1]))
else:
ds1 += dsmiss.format(t[0], t[0], (('"' + str(t[1])) + '"'))
for (key, values) in vars.items():
if (key.lower() in ['midrange', 'random']):
for v in values:
sql += (sqlsel % ('max', v))
sql += (sqlsel % ('min', v))
sqlinto += ((' :imp_max_' + v) + ',\n')
sqlinto += ((' :imp_min_' + v) + ',\n')
if (key.lower() == 'midrange'):
ds1 += dsmiss.format(v, v, ((((((('(&imp_min_' + v) + '.') + ' + ') + '&imp_max_') + v) + '.') + ') / 2'))
elif (key.lower() == 'random'):
ds1 += dsmiss.format(v, v, (((((((((('(&imp_max_' + v) + '.') + ' - ') + '&imp_min_') + v) + '.') + ') * ranuni(0)') + '+ &imp_min_') + v) + '.'))
else:
raise SyntaxError('This should not happen!!!!')
else:
for v in values:
sql += (sqlsel % (key, v))
sqlinto += ((' :imp_' + v) + ',\n')
if (key.lower == 'mode'):
modesql += (modeq % (v, v, (((self.libref + '.') + self.table) + self._dsopts()), v, v, v))
if (varListType.get(v.upper()) == 'N'):
ds1 += dsmiss.format(v, v, (('&imp_' + v) + '.'))
else:
ds1 += dsmiss.format(v, v, (('"&imp_' + v) + '."'))
if (len(sql) > 20):
sql = ((((((((sql.rstrip(', \n') + '\n') + sqlinto.rstrip(', \n')) + '\n from ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\nquit;\n')
else:
sql =
ds1 += 'run;\n'
if self.sas.nosub:
print(((modesql + sql) + ds1))
return None
ll = self.sas.submit(((modesql + sql) + ds1))
return self.sas.sasdata(out_table, libref=out_libref, results=self.results, dsopts=self._dsopts()) |
def sort(self, by: str, out: object='', **kwargs) -> 'SASdata':
"\n Sort the SAS Data Set\n\n :param by: REQUIRED variable to sort by (BY <DESCENDING> variable-1 <<DESCENDING> variable-2 ...>;)\n :param out: OPTIONAL takes either a string 'libref.table' or 'table' which will go to WORK or USER\n if assigned or a sas data object'' will sort in place if allowed\n :param kwargs:\n :return: SASdata object if out= not specified, or a new SASdata object for out= when specified\n\n :Example:\n\n #. wkcars.sort('type')\n #. wkcars2 = sas.sasdata('cars2')\n #. wkcars.sort('cylinders', wkcars2)\n #. cars2=cars.sort('DESCENDING origin', out='foobar')\n #. cars.sort('type').head()\n #. stat_results = stat.reg(model='horsepower = Cylinders EngineSize', by='type', data=wkcars.sort('type'))\n #. stat_results2 = stat.reg(model='horsepower = Cylinders EngineSize', by='type', data=wkcars.sort('type','work.cars'))\n "
outstr = ''
options = ''
if out:
if isinstance(out, str):
fn = out.partition('.')
if (fn[1] == '.'):
libref = fn[0]
table = fn[2]
outstr = ('out=%s.%s' % (libref, table))
else:
libref = ''
table = fn[0]
outstr = ('out=' + table)
else:
libref = out.libref
table = out.table
outstr = ('out=%s.%s' % (out.libref, out.table))
if ('options' in kwargs):
options = kwargs['options']
code = ('proc sort data=%s.%s%s %s %s ;\n' % (self.libref, self.table, self._dsopts(), outstr, options))
code += ('by %s;' % by)
code += 'run\n;'
runcode = True
if self.sas.nosub:
print(code)
runcode = False
ll = self._is_valid()
if ll:
runcode = False
if runcode:
ll = self.sas.submit(code, 'text')
elog = []
for line in ll['LOG'].splitlines():
if line[self.sas.logoffset:].startswith('ERROR'):
elog.append(line)
if len(elog):
raise RuntimeError('\n'.join(elog))
if out:
if (not isinstance(out, str)):
return out
else:
return self.sas.sasdata(table, libref, self.results)
else:
return self | 3,380,481,552,454,117,400 | Sort the SAS Data Set
:param by: REQUIRED variable to sort by (BY <DESCENDING> variable-1 <<DESCENDING> variable-2 ...>;)
:param out: OPTIONAL takes either a string 'libref.table' or 'table' which will go to WORK or USER
if assigned or a sas data object'' will sort in place if allowed
:param kwargs:
:return: SASdata object if out= not specified, or a new SASdata object for out= when specified
:Example:
#. wkcars.sort('type')
#. wkcars2 = sas.sasdata('cars2')
#. wkcars.sort('cylinders', wkcars2)
#. cars2=cars.sort('DESCENDING origin', out='foobar')
#. cars.sort('type').head()
#. stat_results = stat.reg(model='horsepower = Cylinders EngineSize', by='type', data=wkcars.sort('type'))
#. stat_results2 = stat.reg(model='horsepower = Cylinders EngineSize', by='type', data=wkcars.sort('type','work.cars')) | saspy/sasdata.py | sort | kjnh10/saspy | python | def sort(self, by: str, out: object=, **kwargs) -> 'SASdata':
"\n Sort the SAS Data Set\n\n :param by: REQUIRED variable to sort by (BY <DESCENDING> variable-1 <<DESCENDING> variable-2 ...>;)\n :param out: OPTIONAL takes either a string 'libref.table' or 'table' which will go to WORK or USER\n if assigned or a sas data object will sort in place if allowed\n :param kwargs:\n :return: SASdata object if out= not specified, or a new SASdata object for out= when specified\n\n :Example:\n\n #. wkcars.sort('type')\n #. wkcars2 = sas.sasdata('cars2')\n #. wkcars.sort('cylinders', wkcars2)\n #. cars2=cars.sort('DESCENDING origin', out='foobar')\n #. cars.sort('type').head()\n #. stat_results = stat.reg(model='horsepower = Cylinders EngineSize', by='type', data=wkcars.sort('type'))\n #. stat_results2 = stat.reg(model='horsepower = Cylinders EngineSize', by='type', data=wkcars.sort('type','work.cars'))\n "
outstr =
options =
if out:
if isinstance(out, str):
fn = out.partition('.')
if (fn[1] == '.'):
libref = fn[0]
table = fn[2]
outstr = ('out=%s.%s' % (libref, table))
else:
libref =
table = fn[0]
outstr = ('out=' + table)
else:
libref = out.libref
table = out.table
outstr = ('out=%s.%s' % (out.libref, out.table))
if ('options' in kwargs):
options = kwargs['options']
code = ('proc sort data=%s.%s%s %s %s ;\n' % (self.libref, self.table, self._dsopts(), outstr, options))
code += ('by %s;' % by)
code += 'run\n;'
runcode = True
if self.sas.nosub:
print(code)
runcode = False
ll = self._is_valid()
if ll:
runcode = False
if runcode:
ll = self.sas.submit(code, 'text')
elog = []
for line in ll['LOG'].splitlines():
if line[self.sas.logoffset:].startswith('ERROR'):
elog.append(line)
if len(elog):
raise RuntimeError('\n'.join(elog))
if out:
if (not isinstance(out, str)):
return out
else:
return self.sas.sasdata(table, libref, self.results)
else:
return self |
def add_vars(self, vars: dict, out: object=None, **kwargs) -> 'SASLOG':
"\n Copy table to itesf, or to 'out=' table and add any vars if you want\n\n :param vars: REQUIRED dictionayr of variable names (keys) and assignment statement (values)\n to maintain variable order use collections.OrderedDict Assignment statements must be valid \n SAS assignment expressions.\n :param out: OPTIONAL takes a SASdata Object you create ahead of time. If not specified, replaces the existing table\n and the current SAS data object still refers to the replacement table.\n :param kwargs:\n :return: SAS Log showing what happened\n\n :Example:\n\n #. cars = sas.sasdata('cars', 'sashelp') \n #. wkcars = sas.sasdata('cars') \n #. cars.add_vars({'PW_ratio': 'weight / horsepower', 'Overhang' : 'length - wheelbase'}, wkcars)\n #. wkcars.head()\n "
if (out is not None):
if (not isinstance(out, SASdata)):
print('out= needs to be a SASdata object')
return None
else:
outtab = (((out.libref + '.') + out.table) + out._dsopts())
else:
outtab = (((self.libref + '.') + self.table) + self._dsopts())
code = ((((((('data ' + outtab) + '; set ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
for key in vars.keys():
code += (((key + ' = ') + vars[key]) + ';\n')
code += '; run;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LOG'])
else:
return ll | 781,591,339,113,170,600 | Copy table to itesf, or to 'out=' table and add any vars if you want
:param vars: REQUIRED dictionayr of variable names (keys) and assignment statement (values)
to maintain variable order use collections.OrderedDict Assignment statements must be valid
SAS assignment expressions.
:param out: OPTIONAL takes a SASdata Object you create ahead of time. If not specified, replaces the existing table
and the current SAS data object still refers to the replacement table.
:param kwargs:
:return: SAS Log showing what happened
:Example:
#. cars = sas.sasdata('cars', 'sashelp')
#. wkcars = sas.sasdata('cars')
#. cars.add_vars({'PW_ratio': 'weight / horsepower', 'Overhang' : 'length - wheelbase'}, wkcars)
#. wkcars.head() | saspy/sasdata.py | add_vars | kjnh10/saspy | python | def add_vars(self, vars: dict, out: object=None, **kwargs) -> 'SASLOG':
"\n Copy table to itesf, or to 'out=' table and add any vars if you want\n\n :param vars: REQUIRED dictionayr of variable names (keys) and assignment statement (values)\n to maintain variable order use collections.OrderedDict Assignment statements must be valid \n SAS assignment expressions.\n :param out: OPTIONAL takes a SASdata Object you create ahead of time. If not specified, replaces the existing table\n and the current SAS data object still refers to the replacement table.\n :param kwargs:\n :return: SAS Log showing what happened\n\n :Example:\n\n #. cars = sas.sasdata('cars', 'sashelp') \n #. wkcars = sas.sasdata('cars') \n #. cars.add_vars({'PW_ratio': 'weight / horsepower', 'Overhang' : 'length - wheelbase'}, wkcars)\n #. wkcars.head()\n "
if (out is not None):
if (not isinstance(out, SASdata)):
print('out= needs to be a SASdata object')
return None
else:
outtab = (((out.libref + '.') + out.table) + out._dsopts())
else:
outtab = (((self.libref + '.') + self.table) + self._dsopts())
code = ((((((('data ' + outtab) + '; set ') + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
for key in vars.keys():
code += (((key + ' = ') + vars[key]) + ';\n')
code += '; run;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LOG'])
else:
return ll |
def assessModel(self, target: str, prediction: str, nominal: bool=True, event: str='', **kwargs):
'\n This method will calculate assessment measures using the SAS AA_Model_Eval Macro used for SAS Enterprise Miner.\n Not all datasets can be assessed. This is designed for scored data that includes a target and prediction columns\n TODO: add code example of build, score, and then assess\n\n :param target: string that represents the target variable in the data\n :param prediction: string that represents the numeric prediction column in the data. For nominal targets this should a probability between (0,1).\n :param nominal: boolean to indicate if the Target Variable is nominal because the assessment measures are different.\n :param event: string which indicates which value of the nominal target variable is the event vs non-event\n :param kwargs:\n :return: SAS result object\n '
self.sas.submit('%aamodel;')
objtype = 'datastep'
objname = ('{s:{c}^{n}}'.format(s=self.table[:3], n=3, c='_') + self.sas._objcnt())
code = '%macro proccall(d);\n'
score_table = str(((self.libref + '.') + self.table))
binstats = str(((objname + '.') + 'ASSESSMENTSTATISTICS'))
out = str(((objname + '.') + 'ASSESSMENTBINSTATISTICS'))
level = 'interval'
if nominal:
level = 'class'
try:
if (len(event) < 1):
raise Exception(event)
except Exception:
print('No event was specified for a nominal target. Here are possible options:\n')
event_code = ('proc hpdmdb data=%s.%s %s classout=work._DMDBCLASSTARGET(keep=name nraw craw level frequency nmisspercent);' % (self.libref, self.table, self._dsopts()))
event_code += ('\nclass %s ; \nrun;' % target)
event_code += ("data _null_; set work._DMDBCLASSTARGET; where ^(NRAW eq . and CRAW eq '') and lowcase(name)=lowcase('%s');" % target)
ec = self.sas._io.submit(event_code)
HTML(ec['LST'])
if nominal:
code += ('%%aa_model_eval(DATA=%s%s, TARGET=%s, VAR=%s, level=%s, BINSTATS=%s, bins=100, out=%s, EVENT=%s);' % (score_table, self._dsopts(), target, prediction, level, binstats, out, event))
else:
code += ('%%aa_model_eval(DATA=%s%s, TARGET=%s, VAR=%s, level=%s, BINSTATS=%s, bins=100, out=%s);' % (score_table, self._dsopts(), target, prediction, level, binstats, out))
rename_char = '\n data {0};\n set {0};\n if level in ("INTERVAL", "INT") then do;\n rename _sse_ = SumSquaredError\n _div_ = Divsor\n _ASE_ = AverageSquaredError\n _RASE_ = RootAverageSquaredError\n _MEANP_ = MeanPredictionValue\n _STDP_ = StandardDeviationPrediction\n _CVP_ = CoefficientVariationPrediction;\n end;\n else do;\n rename CR = MaxClassificationRate\n KSCut = KSCutOff\n CRDEPTH = MaxClassificationDepth\n MDepth = MedianClassificationDepth\n MCut = MedianEventDetectionCutOff\n CCut = ClassificationCutOff\n _misc_ = MisClassificationRate;\n end;\n run;\n '
code += rename_char.format(binstats)
if nominal:
graphics = '\n ODS PROCLABEL=\'ERRORPLOT\' ;\n proc sgplot data={0};\n title "Error and Correct rate by Depth";\n series x=depth y=correct_rate;\n series x=depth y=error_rate;\n yaxis label="Percentage" grid;\n run;\n /* roc chart */\n ODS PROCLABEL=\'ROCPLOT\' ;\n\n proc sgplot data={0};\n title "ROC Curve";\n series x=one_minus_specificity y=sensitivity;\n yaxis grid;\n run;\n /* Lift and Cumulative Lift */\n ODS PROCLABEL=\'LIFTPLOT\' ;\n proc sgplot data={0};\n Title "Lift and Cumulative Lift";\n series x=depth y=c_lift;\n series x=depth y=lift;\n yaxis grid;\n run;\n '
code += graphics.format(out)
code += 'run; quit; %mend;\n'
code += ('%%mangobj(%s,%s,%s);' % (objname, objtype, self.table))
if self.sas.nosub:
print(code)
return
ll = self.sas.submit(code, 'text')
obj1 = sp2.SASProcCommons._objectmethods(self, objname)
return sp2.SASresults(obj1, self.sas, objname, self.sas.nosub, ll['LOG']) | 3,050,446,093,375,741,400 | This method will calculate assessment measures using the SAS AA_Model_Eval Macro used for SAS Enterprise Miner.
Not all datasets can be assessed. This is designed for scored data that includes a target and prediction columns
TODO: add code example of build, score, and then assess
:param target: string that represents the target variable in the data
:param prediction: string that represents the numeric prediction column in the data. For nominal targets this should a probability between (0,1).
:param nominal: boolean to indicate if the Target Variable is nominal because the assessment measures are different.
:param event: string which indicates which value of the nominal target variable is the event vs non-event
:param kwargs:
:return: SAS result object | saspy/sasdata.py | assessModel | kjnh10/saspy | python | def assessModel(self, target: str, prediction: str, nominal: bool=True, event: str=, **kwargs):
'\n This method will calculate assessment measures using the SAS AA_Model_Eval Macro used for SAS Enterprise Miner.\n Not all datasets can be assessed. This is designed for scored data that includes a target and prediction columns\n TODO: add code example of build, score, and then assess\n\n :param target: string that represents the target variable in the data\n :param prediction: string that represents the numeric prediction column in the data. For nominal targets this should a probability between (0,1).\n :param nominal: boolean to indicate if the Target Variable is nominal because the assessment measures are different.\n :param event: string which indicates which value of the nominal target variable is the event vs non-event\n :param kwargs:\n :return: SAS result object\n '
self.sas.submit('%aamodel;')
objtype = 'datastep'
objname = ('{s:{c}^{n}}'.format(s=self.table[:3], n=3, c='_') + self.sas._objcnt())
code = '%macro proccall(d);\n'
score_table = str(((self.libref + '.') + self.table))
binstats = str(((objname + '.') + 'ASSESSMENTSTATISTICS'))
out = str(((objname + '.') + 'ASSESSMENTBINSTATISTICS'))
level = 'interval'
if nominal:
level = 'class'
try:
if (len(event) < 1):
raise Exception(event)
except Exception:
print('No event was specified for a nominal target. Here are possible options:\n')
event_code = ('proc hpdmdb data=%s.%s %s classout=work._DMDBCLASSTARGET(keep=name nraw craw level frequency nmisspercent);' % (self.libref, self.table, self._dsopts()))
event_code += ('\nclass %s ; \nrun;' % target)
event_code += ("data _null_; set work._DMDBCLASSTARGET; where ^(NRAW eq . and CRAW eq ) and lowcase(name)=lowcase('%s');" % target)
ec = self.sas._io.submit(event_code)
HTML(ec['LST'])
if nominal:
code += ('%%aa_model_eval(DATA=%s%s, TARGET=%s, VAR=%s, level=%s, BINSTATS=%s, bins=100, out=%s, EVENT=%s);' % (score_table, self._dsopts(), target, prediction, level, binstats, out, event))
else:
code += ('%%aa_model_eval(DATA=%s%s, TARGET=%s, VAR=%s, level=%s, BINSTATS=%s, bins=100, out=%s);' % (score_table, self._dsopts(), target, prediction, level, binstats, out))
rename_char = '\n data {0};\n set {0};\n if level in ("INTERVAL", "INT") then do;\n rename _sse_ = SumSquaredError\n _div_ = Divsor\n _ASE_ = AverageSquaredError\n _RASE_ = RootAverageSquaredError\n _MEANP_ = MeanPredictionValue\n _STDP_ = StandardDeviationPrediction\n _CVP_ = CoefficientVariationPrediction;\n end;\n else do;\n rename CR = MaxClassificationRate\n KSCut = KSCutOff\n CRDEPTH = MaxClassificationDepth\n MDepth = MedianClassificationDepth\n MCut = MedianEventDetectionCutOff\n CCut = ClassificationCutOff\n _misc_ = MisClassificationRate;\n end;\n run;\n '
code += rename_char.format(binstats)
if nominal:
graphics = '\n ODS PROCLABEL=\'ERRORPLOT\' ;\n proc sgplot data={0};\n title "Error and Correct rate by Depth";\n series x=depth y=correct_rate;\n series x=depth y=error_rate;\n yaxis label="Percentage" grid;\n run;\n /* roc chart */\n ODS PROCLABEL=\'ROCPLOT\' ;\n\n proc sgplot data={0};\n title "ROC Curve";\n series x=one_minus_specificity y=sensitivity;\n yaxis grid;\n run;\n /* Lift and Cumulative Lift */\n ODS PROCLABEL=\'LIFTPLOT\' ;\n proc sgplot data={0};\n Title "Lift and Cumulative Lift";\n series x=depth y=c_lift;\n series x=depth y=lift;\n yaxis grid;\n run;\n '
code += graphics.format(out)
code += 'run; quit; %mend;\n'
code += ('%%mangobj(%s,%s,%s);' % (objname, objtype, self.table))
if self.sas.nosub:
print(code)
return
ll = self.sas.submit(code, 'text')
obj1 = sp2.SASProcCommons._objectmethods(self, objname)
return sp2.SASresults(obj1, self.sas, objname, self.sas.nosub, ll['LOG']) |
def to_csv(self, file: str, opts: dict=None) -> str:
'\n This method will export a SAS Data Set to a file in CSV format.\n\n :param file: the OS filesystem path of the file to be created (exported from this SAS Data Set)\n :return:\n '
opts = (opts if (opts is not None) else {})
ll = self._is_valid()
if ll:
if (not self.sas.batch):
print(ll['LOG'])
else:
return ll
else:
return self.sas.write_csv(file, self.table, self.libref, self.dsopts, opts) | 8,813,894,262,514,378,000 | This method will export a SAS Data Set to a file in CSV format.
:param file: the OS filesystem path of the file to be created (exported from this SAS Data Set)
:return: | saspy/sasdata.py | to_csv | kjnh10/saspy | python | def to_csv(self, file: str, opts: dict=None) -> str:
'\n This method will export a SAS Data Set to a file in CSV format.\n\n :param file: the OS filesystem path of the file to be created (exported from this SAS Data Set)\n :return:\n '
opts = (opts if (opts is not None) else {})
ll = self._is_valid()
if ll:
if (not self.sas.batch):
print(ll['LOG'])
else:
return ll
else:
return self.sas.write_csv(file, self.table, self.libref, self.dsopts, opts) |
def score(self, file: str='', code: str='', out: 'SASdata'=None) -> 'SASdata':
'\n This method is meant to update a SAS Data object with a model score file.\n\n :param file: a file reference to the SAS score code\n :param code: a string of the valid SAS score code\n :param out: Where to the write the file. Defaults to update in place\n :return: The Scored SAS Data object.\n '
if (out is not None):
outTable = out.table
outLibref = out.libref
else:
outTable = self.table
outLibref = self.libref
codestr = code
code = ('data %s.%s%s;' % (outLibref, outTable, self._dsopts()))
code += ('set %s.%s%s;' % (self.libref, self.table, self._dsopts()))
if (len(file) > 0):
code += ('%%include "%s";' % file)
else:
code += ('%s;' % codestr)
code += 'run;'
if self.sas.nosub:
print(code)
return None
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll | 8,542,006,771,395,692,000 | This method is meant to update a SAS Data object with a model score file.
:param file: a file reference to the SAS score code
:param code: a string of the valid SAS score code
:param out: Where to the write the file. Defaults to update in place
:return: The Scored SAS Data object. | saspy/sasdata.py | score | kjnh10/saspy | python | def score(self, file: str=, code: str=, out: 'SASdata'=None) -> 'SASdata':
'\n This method is meant to update a SAS Data object with a model score file.\n\n :param file: a file reference to the SAS score code\n :param code: a string of the valid SAS score code\n :param out: Where to the write the file. Defaults to update in place\n :return: The Scored SAS Data object.\n '
if (out is not None):
outTable = out.table
outLibref = out.libref
else:
outTable = self.table
outLibref = self.libref
codestr = code
code = ('data %s.%s%s;' % (outLibref, outTable, self._dsopts()))
code += ('set %s.%s%s;' % (self.libref, self.table, self._dsopts()))
if (len(file) > 0):
code += ('%%include "%s";' % file)
else:
code += ('%s;' % codestr)
code += 'run;'
if self.sas.nosub:
print(code)
return None
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll |
def to_frame(self, **kwargs) -> 'pd.DataFrame':
"\n Export this SAS Data Set to a Pandas Data Frame\n\n :param kwargs:\n :return: Pandas data frame\n :rtype: 'pd.DataFrame'\n "
return self.to_df(**kwargs) | -7,170,669,116,586,191,000 | Export this SAS Data Set to a Pandas Data Frame
:param kwargs:
:return: Pandas data frame
:rtype: 'pd.DataFrame' | saspy/sasdata.py | to_frame | kjnh10/saspy | python | def to_frame(self, **kwargs) -> 'pd.DataFrame':
"\n Export this SAS Data Set to a Pandas Data Frame\n\n :param kwargs:\n :return: Pandas data frame\n :rtype: 'pd.DataFrame'\n "
return self.to_df(**kwargs) |
def to_df(self, method: str='MEMORY', **kwargs) -> 'pd.DataFrame':
'\n Export this SAS Data Set to a Pandas Data Frame\n\n :param method: defaults to MEMORY; the original method. CSV is the other choice which uses an intermediary csv file; faster for large data\n :param kwargs:\n :return: Pandas data frame\n '
ll = self._is_valid()
if ll:
print(ll['LOG'])
return None
else:
if self.sas.sascfg.pandas:
raise type(self.sas.sascfg.pandas)(self.sas.sascfg.pandas.msg)
return self.sas.sasdata2dataframe(self.table, self.libref, self.dsopts, method, **kwargs) | 7,848,463,222,761,535,000 | Export this SAS Data Set to a Pandas Data Frame
:param method: defaults to MEMORY; the original method. CSV is the other choice which uses an intermediary csv file; faster for large data
:param kwargs:
:return: Pandas data frame | saspy/sasdata.py | to_df | kjnh10/saspy | python | def to_df(self, method: str='MEMORY', **kwargs) -> 'pd.DataFrame':
'\n Export this SAS Data Set to a Pandas Data Frame\n\n :param method: defaults to MEMORY; the original method. CSV is the other choice which uses an intermediary csv file; faster for large data\n :param kwargs:\n :return: Pandas data frame\n '
ll = self._is_valid()
if ll:
print(ll['LOG'])
return None
else:
if self.sas.sascfg.pandas:
raise type(self.sas.sascfg.pandas)(self.sas.sascfg.pandas.msg)
return self.sas.sasdata2dataframe(self.table, self.libref, self.dsopts, method, **kwargs) |
def to_df_CSV(self, tempfile: str=None, tempkeep: bool=False, **kwargs) -> 'pd.DataFrame':
"\n Export this SAS Data Set to a Pandas Data Frame via CSV file\n\n :param tempfile: [optional] an OS path for a file to use for the local CSV file; default it a temporary file that's cleaned up\n :param tempkeep: if you specify your own file to use with tempfile=, this controls whether it's cleaned up after using it\n :param kwargs:\n :return: Pandas data frame\n :rtype: 'pd.DataFrame'\n "
return self.to_df(method='CSV', tempfile=tempfile, tempkeep=tempkeep, **kwargs) | 1,314,355,456,754,178,600 | Export this SAS Data Set to a Pandas Data Frame via CSV file
:param tempfile: [optional] an OS path for a file to use for the local CSV file; default it a temporary file that's cleaned up
:param tempkeep: if you specify your own file to use with tempfile=, this controls whether it's cleaned up after using it
:param kwargs:
:return: Pandas data frame
:rtype: 'pd.DataFrame' | saspy/sasdata.py | to_df_CSV | kjnh10/saspy | python | def to_df_CSV(self, tempfile: str=None, tempkeep: bool=False, **kwargs) -> 'pd.DataFrame':
"\n Export this SAS Data Set to a Pandas Data Frame via CSV file\n\n :param tempfile: [optional] an OS path for a file to use for the local CSV file; default it a temporary file that's cleaned up\n :param tempkeep: if you specify your own file to use with tempfile=, this controls whether it's cleaned up after using it\n :param kwargs:\n :return: Pandas data frame\n :rtype: 'pd.DataFrame'\n "
return self.to_df(method='CSV', tempfile=tempfile, tempkeep=tempkeep, **kwargs) |
def to_json(self, pretty: bool=False, sastag: bool=False, **kwargs) -> str:
'\n Export this SAS Data Set to a JSON Object\n PROC JSON documentation: http://go.documentation.sas.com/?docsetId=proc&docsetVersion=9.4&docsetTarget=p06hstivs0b3hsn1cb4zclxukkut.htm&locale=en\n\n :param pretty: boolean False return JSON on one line True returns formatted JSON\n :param sastag: include SAS meta tags\n :param kwargs:\n :return: JSON str\n '
code = 'filename file1 temp;\n'
code += 'proc json out=file1'
if pretty:
code += ' pretty '
if (not sastag):
code += ' nosastags '
code += (';\n export %s.%s %s;\n run;' % (self.libref, self.table, self._dsopts()))
if self.sas.nosub:
print(code)
return None
ll = self._is_valid()
runcode = True
if ll:
runcode = False
if runcode:
ll = self.sas.submit(code, 'text')
elog = []
fpath = ''
for line in ll['LOG'].splitlines():
if line[self.sas.logoffset:].startswith('JSONFilePath:'):
fpath = line[14:]
if line[self.sas.logoffset:].startswith('ERROR'):
elog.append(line)
if len(elog):
raise RuntimeError('\n'.join(elog))
if len(fpath):
with open(fpath, 'r') as myfile:
json_str = myfile.read()
return json_str | -1,177,426,332,884,585,200 | Export this SAS Data Set to a JSON Object
PROC JSON documentation: http://go.documentation.sas.com/?docsetId=proc&docsetVersion=9.4&docsetTarget=p06hstivs0b3hsn1cb4zclxukkut.htm&locale=en
:param pretty: boolean False return JSON on one line True returns formatted JSON
:param sastag: include SAS meta tags
:param kwargs:
:return: JSON str | saspy/sasdata.py | to_json | kjnh10/saspy | python | def to_json(self, pretty: bool=False, sastag: bool=False, **kwargs) -> str:
'\n Export this SAS Data Set to a JSON Object\n PROC JSON documentation: http://go.documentation.sas.com/?docsetId=proc&docsetVersion=9.4&docsetTarget=p06hstivs0b3hsn1cb4zclxukkut.htm&locale=en\n\n :param pretty: boolean False return JSON on one line True returns formatted JSON\n :param sastag: include SAS meta tags\n :param kwargs:\n :return: JSON str\n '
code = 'filename file1 temp;\n'
code += 'proc json out=file1'
if pretty:
code += ' pretty '
if (not sastag):
code += ' nosastags '
code += (';\n export %s.%s %s;\n run;' % (self.libref, self.table, self._dsopts()))
if self.sas.nosub:
print(code)
return None
ll = self._is_valid()
runcode = True
if ll:
runcode = False
if runcode:
ll = self.sas.submit(code, 'text')
elog = []
fpath =
for line in ll['LOG'].splitlines():
if line[self.sas.logoffset:].startswith('JSONFilePath:'):
fpath = line[14:]
if line[self.sas.logoffset:].startswith('ERROR'):
elog.append(line)
if len(elog):
raise RuntimeError('\n'.join(elog))
if len(fpath):
with open(fpath, 'r') as myfile:
json_str = myfile.read()
return json_str |
def heatmap(self, x: str, y: str, options: str='', title: str='', label: str='') -> object:
'\n Documentation link: http://support.sas.com/documentation/cdl/en/grstatproc/67909/HTML/default/viewer.htm#n0w12m4cn1j5c6n12ak64u1rys4w.htm\n\n :param x: x variable\n :param y: y variable\n :param options: display options (string)\n :param title: graph title\n :param label:\n :return:\n '
code = ('proc sgplot data=%s.%s %s;' % (self.libref, self.table, self._dsopts()))
if len(options):
code += ("\n\theatmap x='%s'n y='%s'n / %s;" % (x, y, options))
else:
code += ("\n\theatmap x='%s'n y='%s'n;" % (x, y))
if (len(label) > 0):
code += ((" LegendLABEL='" + label) + "'")
code += ';\n'
if (len(title) > 0):
code += ("\ttitle '%s';\n" % title)
code += 'run;\ntitle;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll | -2,740,617,728,054,231,600 | Documentation link: http://support.sas.com/documentation/cdl/en/grstatproc/67909/HTML/default/viewer.htm#n0w12m4cn1j5c6n12ak64u1rys4w.htm
:param x: x variable
:param y: y variable
:param options: display options (string)
:param title: graph title
:param label:
:return: | saspy/sasdata.py | heatmap | kjnh10/saspy | python | def heatmap(self, x: str, y: str, options: str=, title: str=, label: str=) -> object:
'\n Documentation link: http://support.sas.com/documentation/cdl/en/grstatproc/67909/HTML/default/viewer.htm#n0w12m4cn1j5c6n12ak64u1rys4w.htm\n\n :param x: x variable\n :param y: y variable\n :param options: display options (string)\n :param title: graph title\n :param label:\n :return:\n '
code = ('proc sgplot data=%s.%s %s;' % (self.libref, self.table, self._dsopts()))
if len(options):
code += ("\n\theatmap x='%s'n y='%s'n / %s;" % (x, y, options))
else:
code += ("\n\theatmap x='%s'n y='%s'n;" % (x, y))
if (len(label) > 0):
code += ((" LegendLABEL='" + label) + "'")
code += ';\n'
if (len(title) > 0):
code += ("\ttitle '%s';\n" % title)
code += 'run;\ntitle;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll |
def hist(self, var: str, title: str='', label: str='') -> object:
'\n This method requires a numeric column (use the contents method to see column types) and generates a histogram.\n\n :param var: the NUMERIC variable (column) you want to plot\n :param title: an optional Title for the chart\n :param label: LegendLABEL= value for sgplot\n :return:\n '
code = (((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts())
code += ((";\n\thistogram '" + var) + "'n / scale=count")
if (len(label) > 0):
code += ((" LegendLABEL='" + label) + "'")
code += ';\n'
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
code += ((("\tdensity '" + var) + "'n;\nrun;\n") + 'title;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll | -8,487,482,241,291,482,000 | This method requires a numeric column (use the contents method to see column types) and generates a histogram.
:param var: the NUMERIC variable (column) you want to plot
:param title: an optional Title for the chart
:param label: LegendLABEL= value for sgplot
:return: | saspy/sasdata.py | hist | kjnh10/saspy | python | def hist(self, var: str, title: str=, label: str=) -> object:
'\n This method requires a numeric column (use the contents method to see column types) and generates a histogram.\n\n :param var: the NUMERIC variable (column) you want to plot\n :param title: an optional Title for the chart\n :param label: LegendLABEL= value for sgplot\n :return:\n '
code = (((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts())
code += ((";\n\thistogram '" + var) + "'n / scale=count")
if (len(label) > 0):
code += ((" LegendLABEL='" + label) + "'")
code += ';\n'
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
code += ((("\tdensity '" + var) + "'n;\nrun;\n") + 'title;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll |
def top(self, var: str, n: int=10, order: str='freq', title: str='') -> object:
"\n Return the most commonly occuring items (levels)\n\n :param var: the CHAR variable (column) you want to count\n :param n: the top N to be displayed (defaults to 10)\n :param order: default to most common use order='data' to get then in alphbetic order\n :param title: an optional Title for the chart\n :return: Data Table\n "
code = ('proc freq data=%s.%s %s order=%s noprint;' % (self.libref, self.table, self._dsopts(), order))
code += ("\n\ttables '%s'n / out=tmpFreqOut;" % var)
code += '\nrun;'
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
code += ('proc print data=tmpFreqOut(obs=%s); \nrun;' % n)
code += 'title;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (self.results.upper() == 'PANDAS'):
code = ('proc freq data=%s.%s%s order=%s noprint;' % (self.libref, self.table, self._dsopts(), order))
code += ("\n\ttables '%s'n / out=tmpFreqOut;" % var)
code += '\nrun;'
code += ('\ndata tmpFreqOut; set tmpFreqOut(obs=%s); run;' % n)
return self._returnPD(code, 'tmpFreqOut')
elif self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll | 4,979,005,287,876,301,000 | Return the most commonly occuring items (levels)
:param var: the CHAR variable (column) you want to count
:param n: the top N to be displayed (defaults to 10)
:param order: default to most common use order='data' to get then in alphbetic order
:param title: an optional Title for the chart
:return: Data Table | saspy/sasdata.py | top | kjnh10/saspy | python | def top(self, var: str, n: int=10, order: str='freq', title: str=) -> object:
"\n Return the most commonly occuring items (levels)\n\n :param var: the CHAR variable (column) you want to count\n :param n: the top N to be displayed (defaults to 10)\n :param order: default to most common use order='data' to get then in alphbetic order\n :param title: an optional Title for the chart\n :return: Data Table\n "
code = ('proc freq data=%s.%s %s order=%s noprint;' % (self.libref, self.table, self._dsopts(), order))
code += ("\n\ttables '%s'n / out=tmpFreqOut;" % var)
code += '\nrun;'
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
code += ('proc print data=tmpFreqOut(obs=%s); \nrun;' % n)
code += 'title;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (self.results.upper() == 'PANDAS'):
code = ('proc freq data=%s.%s%s order=%s noprint;' % (self.libref, self.table, self._dsopts(), order))
code += ("\n\ttables '%s'n / out=tmpFreqOut;" % var)
code += '\nrun;'
code += ('\ndata tmpFreqOut; set tmpFreqOut(obs=%s); run;' % n)
return self._returnPD(code, 'tmpFreqOut')
elif self.HTML:
if (not ll):
ll = self.sas._io.submit(code)
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll
else:
if (not ll):
ll = self.sas._io.submit(code, 'text')
if (not self.sas.batch):
print(ll['LST'])
else:
return ll |
def bar(self, var: str, title: str='', label: str='') -> object:
'\n This method requires a character column (use the contents method to see column types)\n and generates a bar chart.\n\n :param var: the CHAR variable (column) you want to plot\n :param title: an optional title for the chart\n :param label: LegendLABEL= value for sgplot\n :return: graphic plot\n '
code = (((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts())
code += ((";\n\tvbar '" + var) + "'n")
if (len(label) > 0):
code += ((" / LegendLABEL='" + label) + "'")
code += ';\n'
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
code += 'run;\ntitle;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll | 8,782,738,664,675,304,000 | This method requires a character column (use the contents method to see column types)
and generates a bar chart.
:param var: the CHAR variable (column) you want to plot
:param title: an optional title for the chart
:param label: LegendLABEL= value for sgplot
:return: graphic plot | saspy/sasdata.py | bar | kjnh10/saspy | python | def bar(self, var: str, title: str=, label: str=) -> object:
'\n This method requires a character column (use the contents method to see column types)\n and generates a bar chart.\n\n :param var: the CHAR variable (column) you want to plot\n :param title: an optional title for the chart\n :param label: LegendLABEL= value for sgplot\n :return: graphic plot\n '
code = (((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts())
code += ((";\n\tvbar '" + var) + "'n")
if (len(label) > 0):
code += ((" / LegendLABEL='" + label) + "'")
code += ';\n'
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
code += 'run;\ntitle;'
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll |
def series(self, x: str, y: list, title: str='') -> object:
'\n This method plots a series of x,y coordinates. You can provide a list of y columns for multiple line plots.\n\n :param x: the x axis variable; generally a time or continuous variable.\n :param y: the y axis variable(s), you can specify a single column or a list of columns\n :param title: an optional Title for the chart\n :return: graph object\n '
code = ((((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
if isinstance(y, list):
num = len(y)
else:
num = 1
y = [y]
for i in range(num):
code += (((("\tseries x='" + x) + "'n y='") + str(y[i])) + "'n;\n")
code += ('run;\n' + 'title;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll | -2,095,631,289,854,435,800 | This method plots a series of x,y coordinates. You can provide a list of y columns for multiple line plots.
:param x: the x axis variable; generally a time or continuous variable.
:param y: the y axis variable(s), you can specify a single column or a list of columns
:param title: an optional Title for the chart
:return: graph object | saspy/sasdata.py | series | kjnh10/saspy | python | def series(self, x: str, y: list, title: str=) -> object:
'\n This method plots a series of x,y coordinates. You can provide a list of y columns for multiple line plots.\n\n :param x: the x axis variable; generally a time or continuous variable.\n :param y: the y axis variable(s), you can specify a single column or a list of columns\n :param title: an optional Title for the chart\n :return: graph object\n '
code = ((((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
if isinstance(y, list):
num = len(y)
else:
num = 1
y = [y]
for i in range(num):
code += (((("\tseries x='" + x) + "'n y='") + str(y[i])) + "'n;\n")
code += ('run;\n' + 'title;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll |
def scatter(self, x: str, y: list, title: str='') -> object:
'\n This method plots a scatter of x,y coordinates. You can provide a list of y columns for multiple line plots.\n\n :param x: the x axis variable; generally a time or continuous variable.\n :param y: the y axis variable(s), you can specify a single column or a list of columns\n :param title: an optional Title for the chart\n :return: graph object\n '
code = ((((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
if isinstance(y, list):
num = len(y)
else:
num = 1
y = [y]
for i in range(num):
code += (((("\tscatter x='" + x) + "'n y='") + y[i]) + "'n;\n")
code += ('run;\n' + 'title;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll | -1,922,689,700,356,596,500 | This method plots a scatter of x,y coordinates. You can provide a list of y columns for multiple line plots.
:param x: the x axis variable; generally a time or continuous variable.
:param y: the y axis variable(s), you can specify a single column or a list of columns
:param title: an optional Title for the chart
:return: graph object | saspy/sasdata.py | scatter | kjnh10/saspy | python | def scatter(self, x: str, y: list, title: str=) -> object:
'\n This method plots a scatter of x,y coordinates. You can provide a list of y columns for multiple line plots.\n\n :param x: the x axis variable; generally a time or continuous variable.\n :param y: the y axis variable(s), you can specify a single column or a list of columns\n :param title: an optional Title for the chart\n :return: graph object\n '
code = ((((('proc sgplot data=' + self.libref) + '.') + self.table) + self._dsopts()) + ';\n')
if (len(title) > 0):
code += (('\ttitle "' + title) + '";\n')
if isinstance(y, list):
num = len(y)
else:
num = 1
y = [y]
for i in range(num):
code += (((("\tscatter x='" + x) + "'n y='") + y[i]) + "'n;\n")
code += ('run;\n' + 'title;')
if self.sas.nosub:
print(code)
return
ll = self._is_valid()
if (not ll):
html = self.HTML
self.HTML = 1
ll = self.sas._io.submit(code)
self.HTML = html
if (not self.sas.batch):
self.sas.DISPLAY(self.sas.HTML(ll['LST']))
else:
return ll |
def _invert(f_x, y, x, domain=S.Complexes):
"\n Reduce the complex valued equation ``f(x) = y`` to a set of equations\n ``{g(x) = h_1(y), g(x) = h_2(y), ..., g(x) = h_n(y) }`` where ``g(x)`` is\n a simpler function than ``f(x)``. The return value is a tuple ``(g(x),\n set_h)``, where ``g(x)`` is a function of ``x`` and ``set_h`` is\n the set of function ``{h_1(y), h_2(y), ..., h_n(y)}``.\n Here, ``y`` is not necessarily a symbol.\n\n The ``set_h`` contains the functions along with the information\n about their domain in which they are valid, through set\n operations. For instance, if ``y = Abs(x) - n``, is inverted\n in the real domain, then, the ``set_h`` doesn't simply return\n `{-n, n}`, as the nature of `n` is unknown; rather it will return:\n `Intersection([0, oo) {n}) U Intersection((-oo, 0], {-n})`\n\n By default, the complex domain is used but note that inverting even\n seemingly simple functions like ``exp(x)`` can give very different\n result in the complex domain than are obtained in the real domain.\n (In the case of ``exp(x)``, the inversion via ``log`` is multi-valued\n in the complex domain, having infinitely many branches.)\n\n If you are working with real values only (or you are not sure which\n function to use) you should probably use set the domain to\n ``S.Reals`` (or use `invert\\_real` which does that automatically).\n\n\n Examples\n ========\n\n >>> from sympy.solvers.solveset import invert_complex, invert_real\n >>> from sympy.abc import x, y\n >>> from sympy import exp, log\n\n When does exp(x) == y?\n\n >>> invert_complex(exp(x), y, x)\n (x, ImageSet(Lambda(_n, I*(2*_n*pi + arg(y)) + log(Abs(y))), Integers()))\n >>> invert_real(exp(x), y, x)\n (x, Intersection((-oo, oo), {log(y)}))\n\n When does exp(x) == 1?\n\n >>> invert_complex(exp(x), 1, x)\n (x, ImageSet(Lambda(_n, 2*_n*I*pi), Integers()))\n >>> invert_real(exp(x), 1, x)\n (x, {0})\n\n See Also\n ========\n invert_real, invert_complex\n "
x = sympify(x)
if (not x.is_Symbol):
raise ValueError('x must be a symbol')
f_x = sympify(f_x)
if (not f_x.has(x)):
raise ValueError("Inverse of constant function doesn't exist")
y = sympify(y)
if y.has(x):
raise ValueError('y should be independent of x ')
if domain.is_subset(S.Reals):
(x, s) = _invert_real(f_x, FiniteSet(y), x)
else:
(x, s) = _invert_complex(f_x, FiniteSet(y), x)
return (x, (s.intersection(domain) if isinstance(s, FiniteSet) else s)) | -329,772,719,969,644,500 | Reduce the complex valued equation ``f(x) = y`` to a set of equations
``{g(x) = h_1(y), g(x) = h_2(y), ..., g(x) = h_n(y) }`` where ``g(x)`` is
a simpler function than ``f(x)``. The return value is a tuple ``(g(x),
set_h)``, where ``g(x)`` is a function of ``x`` and ``set_h`` is
the set of function ``{h_1(y), h_2(y), ..., h_n(y)}``.
Here, ``y`` is not necessarily a symbol.
The ``set_h`` contains the functions along with the information
about their domain in which they are valid, through set
operations. For instance, if ``y = Abs(x) - n``, is inverted
in the real domain, then, the ``set_h`` doesn't simply return
`{-n, n}`, as the nature of `n` is unknown; rather it will return:
`Intersection([0, oo) {n}) U Intersection((-oo, 0], {-n})`
By default, the complex domain is used but note that inverting even
seemingly simple functions like ``exp(x)`` can give very different
result in the complex domain than are obtained in the real domain.
(In the case of ``exp(x)``, the inversion via ``log`` is multi-valued
in the complex domain, having infinitely many branches.)
If you are working with real values only (or you are not sure which
function to use) you should probably use set the domain to
``S.Reals`` (or use `invert\_real` which does that automatically).
Examples
========
>>> from sympy.solvers.solveset import invert_complex, invert_real
>>> from sympy.abc import x, y
>>> from sympy import exp, log
When does exp(x) == y?
>>> invert_complex(exp(x), y, x)
(x, ImageSet(Lambda(_n, I*(2*_n*pi + arg(y)) + log(Abs(y))), Integers()))
>>> invert_real(exp(x), y, x)
(x, Intersection((-oo, oo), {log(y)}))
When does exp(x) == 1?
>>> invert_complex(exp(x), 1, x)
(x, ImageSet(Lambda(_n, 2*_n*I*pi), Integers()))
>>> invert_real(exp(x), 1, x)
(x, {0})
See Also
========
invert_real, invert_complex | sympy/solvers/solveset.py | _invert | aktech/sympy | python | def _invert(f_x, y, x, domain=S.Complexes):
"\n Reduce the complex valued equation ``f(x) = y`` to a set of equations\n ``{g(x) = h_1(y), g(x) = h_2(y), ..., g(x) = h_n(y) }`` where ``g(x)`` is\n a simpler function than ``f(x)``. The return value is a tuple ``(g(x),\n set_h)``, where ``g(x)`` is a function of ``x`` and ``set_h`` is\n the set of function ``{h_1(y), h_2(y), ..., h_n(y)}``.\n Here, ``y`` is not necessarily a symbol.\n\n The ``set_h`` contains the functions along with the information\n about their domain in which they are valid, through set\n operations. For instance, if ``y = Abs(x) - n``, is inverted\n in the real domain, then, the ``set_h`` doesn't simply return\n `{-n, n}`, as the nature of `n` is unknown; rather it will return:\n `Intersection([0, oo) {n}) U Intersection((-oo, 0], {-n})`\n\n By default, the complex domain is used but note that inverting even\n seemingly simple functions like ``exp(x)`` can give very different\n result in the complex domain than are obtained in the real domain.\n (In the case of ``exp(x)``, the inversion via ``log`` is multi-valued\n in the complex domain, having infinitely many branches.)\n\n If you are working with real values only (or you are not sure which\n function to use) you should probably use set the domain to\n ``S.Reals`` (or use `invert\\_real` which does that automatically).\n\n\n Examples\n ========\n\n >>> from sympy.solvers.solveset import invert_complex, invert_real\n >>> from sympy.abc import x, y\n >>> from sympy import exp, log\n\n When does exp(x) == y?\n\n >>> invert_complex(exp(x), y, x)\n (x, ImageSet(Lambda(_n, I*(2*_n*pi + arg(y)) + log(Abs(y))), Integers()))\n >>> invert_real(exp(x), y, x)\n (x, Intersection((-oo, oo), {log(y)}))\n\n When does exp(x) == 1?\n\n >>> invert_complex(exp(x), 1, x)\n (x, ImageSet(Lambda(_n, 2*_n*I*pi), Integers()))\n >>> invert_real(exp(x), 1, x)\n (x, {0})\n\n See Also\n ========\n invert_real, invert_complex\n "
x = sympify(x)
if (not x.is_Symbol):
raise ValueError('x must be a symbol')
f_x = sympify(f_x)
if (not f_x.has(x)):
raise ValueError("Inverse of constant function doesn't exist")
y = sympify(y)
if y.has(x):
raise ValueError('y should be independent of x ')
if domain.is_subset(S.Reals):
(x, s) = _invert_real(f_x, FiniteSet(y), x)
else:
(x, s) = _invert_complex(f_x, FiniteSet(y), x)
return (x, (s.intersection(domain) if isinstance(s, FiniteSet) else s)) |
def invert_real(f_x, y, x, domain=S.Reals):
'\n Inverts a real-valued function. Same as _invert, but sets\n the domain to ``S.Reals`` before inverting.\n '
return _invert(f_x, y, x, domain) | 5,623,686,675,543,600,000 | Inverts a real-valued function. Same as _invert, but sets
the domain to ``S.Reals`` before inverting. | sympy/solvers/solveset.py | invert_real | aktech/sympy | python | def invert_real(f_x, y, x, domain=S.Reals):
'\n Inverts a real-valued function. Same as _invert, but sets\n the domain to ``S.Reals`` before inverting.\n '
return _invert(f_x, y, x, domain) |
def _invert_real(f, g_ys, symbol):
'Helper function for _invert.'
if (f == symbol):
return (f, g_ys)
n = Dummy('n', real=True)
if (hasattr(f, 'inverse') and (not isinstance(f, (TrigonometricFunction, HyperbolicFunction)))):
if (len(f.args) > 1):
raise ValueError('Only functions with one argument are supported.')
return _invert_real(f.args[0], imageset(Lambda(n, f.inverse()(n)), g_ys), symbol)
if isinstance(f, Abs):
pos = Interval(0, S.Infinity)
neg = Interval(S.NegativeInfinity, 0)
return _invert_real(f.args[0], Union(imageset(Lambda(n, n), g_ys).intersect(pos), imageset(Lambda(n, (- n)), g_ys).intersect(neg)), symbol)
if f.is_Add:
(g, h) = f.as_independent(symbol)
if (g is not S.Zero):
return _invert_real(h, imageset(Lambda(n, (n - g)), g_ys), symbol)
if f.is_Mul:
(g, h) = f.as_independent(symbol)
if (g is not S.One):
return _invert_real(h, imageset(Lambda(n, (n / g)), g_ys), symbol)
if f.is_Pow:
(base, expo) = f.args
base_has_sym = base.has(symbol)
expo_has_sym = expo.has(symbol)
if (not expo_has_sym):
res = imageset(Lambda(n, real_root(n, expo)), g_ys)
if expo.is_rational:
(numer, denom) = expo.as_numer_denom()
if ((numer == S.One) or (numer == (- S.One))):
return _invert_real(base, res, symbol)
elif ((numer % 2) == 0):
n = Dummy('n')
neg_res = imageset(Lambda(n, (- n)), res)
return _invert_real(base, (res + neg_res), symbol)
else:
return _invert_real(base, res, symbol)
else:
if (not base.is_positive):
raise ValueError('x**w where w is irrational is not defined for negative x')
return _invert_real(base, res, symbol)
if (not base_has_sym):
return _invert_real(expo, imageset(Lambda(n, (log(n) / log(base))), g_ys), symbol)
if isinstance(f, TrigonometricFunction):
if isinstance(g_ys, FiniteSet):
def inv(trig):
if isinstance(f, (sin, csc)):
F = (asin if isinstance(f, sin) else acsc)
return ((lambda a: ((n * pi) + (((- 1) ** n) * F(a)))),)
if isinstance(f, (cos, sec)):
F = (acos if isinstance(f, cos) else asec)
return ((lambda a: (((2 * n) * pi) + F(a))), (lambda a: (((2 * n) * pi) - F(a))))
if isinstance(f, (tan, cot)):
return ((lambda a: ((n * pi) + f.inverse()(a))),)
n = Dummy('n', integer=True)
invs = S.EmptySet
for L in inv(f):
invs += Union(*[imageset(Lambda(n, L(g)), S.Integers) for g in g_ys])
return _invert_real(f.args[0], invs, symbol)
return (f, g_ys) | -8,353,688,872,299,090,000 | Helper function for _invert. | sympy/solvers/solveset.py | _invert_real | aktech/sympy | python | def _invert_real(f, g_ys, symbol):
if (f == symbol):
return (f, g_ys)
n = Dummy('n', real=True)
if (hasattr(f, 'inverse') and (not isinstance(f, (TrigonometricFunction, HyperbolicFunction)))):
if (len(f.args) > 1):
raise ValueError('Only functions with one argument are supported.')
return _invert_real(f.args[0], imageset(Lambda(n, f.inverse()(n)), g_ys), symbol)
if isinstance(f, Abs):
pos = Interval(0, S.Infinity)
neg = Interval(S.NegativeInfinity, 0)
return _invert_real(f.args[0], Union(imageset(Lambda(n, n), g_ys).intersect(pos), imageset(Lambda(n, (- n)), g_ys).intersect(neg)), symbol)
if f.is_Add:
(g, h) = f.as_independent(symbol)
if (g is not S.Zero):
return _invert_real(h, imageset(Lambda(n, (n - g)), g_ys), symbol)
if f.is_Mul:
(g, h) = f.as_independent(symbol)
if (g is not S.One):
return _invert_real(h, imageset(Lambda(n, (n / g)), g_ys), symbol)
if f.is_Pow:
(base, expo) = f.args
base_has_sym = base.has(symbol)
expo_has_sym = expo.has(symbol)
if (not expo_has_sym):
res = imageset(Lambda(n, real_root(n, expo)), g_ys)
if expo.is_rational:
(numer, denom) = expo.as_numer_denom()
if ((numer == S.One) or (numer == (- S.One))):
return _invert_real(base, res, symbol)
elif ((numer % 2) == 0):
n = Dummy('n')
neg_res = imageset(Lambda(n, (- n)), res)
return _invert_real(base, (res + neg_res), symbol)
else:
return _invert_real(base, res, symbol)
else:
if (not base.is_positive):
raise ValueError('x**w where w is irrational is not defined for negative x')
return _invert_real(base, res, symbol)
if (not base_has_sym):
return _invert_real(expo, imageset(Lambda(n, (log(n) / log(base))), g_ys), symbol)
if isinstance(f, TrigonometricFunction):
if isinstance(g_ys, FiniteSet):
def inv(trig):
if isinstance(f, (sin, csc)):
F = (asin if isinstance(f, sin) else acsc)
return ((lambda a: ((n * pi) + (((- 1) ** n) * F(a)))),)
if isinstance(f, (cos, sec)):
F = (acos if isinstance(f, cos) else asec)
return ((lambda a: (((2 * n) * pi) + F(a))), (lambda a: (((2 * n) * pi) - F(a))))
if isinstance(f, (tan, cot)):
return ((lambda a: ((n * pi) + f.inverse()(a))),)
n = Dummy('n', integer=True)
invs = S.EmptySet
for L in inv(f):
invs += Union(*[imageset(Lambda(n, L(g)), S.Integers) for g in g_ys])
return _invert_real(f.args[0], invs, symbol)
return (f, g_ys) |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.