body stringlengths 26-98.2k | body_hash int64 -9,222,864,604,528,158,000 to 9,221,803,474B | docstring stringlengths 1-16.8k | path stringlengths 5-230 | name stringlengths 1-96 | repository_name stringlengths 7-89 | lang stringclasses 1 value (python) | body_without_docstring stringlengths 20-98.2k
---|---|---|---|---|---|---|---|
@pytest.mark.filterwarnings('ignore::fitsio.FITSRuntimeWarning')
def test_savesim(simulation, PSRfits):
'\n Test save simulation function.\n '
simulation._Nchan = 1
simulation._tobs = 2.0
simulation.simulate(from_template=True)
simulation.save_simulation(out_format='pdv')
simulation.save_simulation(out_format='psrfits', phaseconnect=False)
os.remove('sim_fits.fits')
dfs = glob.glob('simfits*')
for df in dfs:
os.remove(df)
with pytest.raises(RuntimeError):
simulation.save_simulation(out_format='wrong_fmt')
simulation._tempfile = None
simulation.save_simulation(out_format='psrfits') | -3,575,102,837,991,144,400 | Test save simulation function. | tests/test_simulate.py | test_savesim | bshapiroalbert/PsrSigSim | python | @pytest.mark.filterwarnings('ignore::fitsio.FITSRuntimeWarning')
def test_savesim(simulation, PSRfits):
simulation._Nchan = 1
simulation._tobs = 2.0
simulation.simulate(from_template=True)
simulation.save_simulation(out_format='pdv')
simulation.save_simulation(out_format='psrfits', phaseconnect=False)
os.remove('sim_fits.fits')
dfs = glob.glob('simfits*')
for df in dfs:
os.remove(df)
with pytest.raises(RuntimeError):
simulation.save_simulation(out_format='wrong_fmt')
simulation._tempfile = None
simulation.save_simulation(out_format='psrfits') |
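
The row above deletes its output files inline, so a failure partway through leaves `sim_fits.fits` and `simfits*` behind. A minimal sketch of a self-cleaning variant, assuming the same `simulation` fixture and the output file names used above (`test_savesim_cleanup` is a hypothetical name, not part of the repository):

```python
import glob
import os

import pytest


@pytest.mark.filterwarnings('ignore::fitsio.FITSRuntimeWarning')
def test_savesim_cleanup(simulation):
    """Variant of test_savesim that cleans up its outputs even on failure."""
    simulation._Nchan = 1
    simulation._tobs = 2.0
    simulation.simulate(from_template=True)
    try:
        simulation.save_simulation(out_format='pdv')
        simulation.save_simulation(out_format='psrfits', phaseconnect=False)
    finally:
        # Remove whatever output files were produced before the test ends.
        for df in glob.glob('sim_fits.fits') + glob.glob('simfits*'):
            os.remove(df)
```
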
def step(self, dt):
'step once by dt seconds'
self.time_elapsed += dt
D = cdist(self.state[:, :3], self.state[:, 3:6], 'euclidean')
(ind, din) = np.where((D > 122))
uniqua = (ind == din)
ind = ind[uniqua]
for i in ind:
v = self.state[(i, 8)]
v_avg = v
a_ver = self.acc_vert
a_ver_eff = self.acc_vert_eff
height = (self.max_height - self.state[(i, 2)])
print(height)
if (height > 0):
n = 1
if (v > 0):
n = (v / abs(v))
stop = ((n * (v ** 2)) / (2 * a_ver))
t_end = abs((v / a_ver))
b1 = (((v ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((v + ((n * a_ver) * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_ver * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
h = ((2 * t) / (b2 - b1))
area = (n * (t + ((b2 - b1) * h)))
if ((t_end <= dt) and (stop > (height - area))):
v_avg = 0
self.state[(i, 8)] = 0
self.state[(i, 2)] = self.max_height
elif (stop > (height - area)):
t_max = 0
if (stop < height):
a = (2 * (a_ver ** 2))
b = ((4 * a_ver) * v)
c = ((v ** 2) - ((2 * a_ver) * height))
t_max = (((- b) + (((b ** 2) - ((4 * a) * c)) ** 0.5)) / (2 * a))
v_max = (v + (a_ver * (t_max / dt)))
v_end = (((2 * v_max) - v) - (a_ver * dt))
v_avg = ((((v_max + v) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[(i, 8)] = v_end
else:
v_avg = (v + ((a_ver * dt) / 2))
self.state[(i, 8)] += (a_ver * dt)
elif (height < 0):
n = (v / abs(v))
stop = ((n * (v ** 2)) / (2 * a_ver_eff))
t_end = abs((v / a_ver_eff))
b1 = (((v ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((v + ((n * a_ver_eff) * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_ver_eff * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
h = ((2 * t) / (b2 - b1))
area = (n * (t + ((b2 - b1) * h)))
if ((t_end <= dt) and (abs(stop) <= abs(height))):
v_avg = ((v / 2) * (t_end / dt))
self.state[(i, 8)] = (v + (a_ver_eff * t_end))
elif (stop < (height - area)):
v_max = ((height * (2 * a_ver_eff)) ** 0.5)
t_max = ((v_max - v) / a_ver_eff)
v_end = (((2 * v_max) - v) - (a_ver_eff * dt))
v_avg = ((((v_max + v) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[(i, 8)] = v_end
else:
v_avg = (v - ((a_ver_eff * dt) / 2))
self.state[(i, 8)] = (v - (a_ver_eff * dt))
else:
self.state[(i, 8)] += (0 * dt)
self.state[(i, 2)] += (v_avg * dt)
r = (self.state[i, 3:5] - self.state[i, :2])
m = np.linalg.norm(r)
u = (r / m)
a_hor = self.acc_hor
v_hor = self.state[i, 6:8]
h = np.linalg.norm(v_hor)
stop = ((h ** 2) / (2 * a_hor))
t_end = (h / a_hor)
b1 = (((h ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((h + (a_hor * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_hor * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
s = ((2 * t) / (b2 - b1))
area = (t + ((b2 - b1) * s))
if ((t_end <= dt) and (stop < area)):
v_hor = ((h / 2) * (t_end / dt))
self.state[i, 6:8] = ((h - (a_hor * t_end)) * u)
elif (stop > (m - area)):
v_max = ((m * (2 * a_hor)) ** 0.5)
t_max = ((v_max - h) / a_hor)
v_end = (((2 * v_max) - h) - (a_hor * dt))
v_hor = ((((v_max + h) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[i, 6:8] = (v_end * u)
else:
v_hor = (h + ((a_hor * dt) / 2))
self.state[i, 6:8] = ((h + (a_hor * dt)) * u)
self.state[i, :2] += ((v_hor * dt) * u)
(done, fund) = np.where((D <= 122))
uniquo = (done == fund)
done = done[uniquo]
for i in done:
print('here')
v = self.state[(i, 8)]
v_avg = v
a_ver_eff = self.acc_vert_eff
n = (- 1)
if (v < 0):
n = (v / abs(v))
stop = ((n * (v ** 2)) / (2 * a_ver_eff))
t_end = abs((v / a_ver_eff))
b1 = (((v ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((v + ((n * a_ver_eff) * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_ver_eff * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
h = ((2 * t) / (b2 - b1))
area = (n * (t + ((b2 - b1) * h)))
if ((t_end <= dt) and (stop > area)):
v_avg = ((v / 2) * (t_end / dt))
self.state[(i, 8)] = (v + (a_ver_eff * t_end))
self.state[(i, 9)] = self.time_elapsed
elif (stop < ((- self.state[(i, 2)]) - area)):
v_max = (((- self.state[(i, 2)]) * (2 * a_ver_eff)) ** 0.5)
t_max = ((v_max - v) / a_ver_eff)
v_end = (((2 * v_max) - v) - (a_ver_eff * dt))
v_avg = ((((v_max + v) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[(i, 8)] = v_end
else:
v_avg = (v - ((a_ver_eff * dt) / 2))
self.state[(i, 8)] = (v - (a_ver_eff * dt))
self.state[(i, 2)] += (v_avg * dt)
E = squareform(pdist(self.state[:, :3], 'euclidean'))
(ind1, ind2) = np.where((E < (2 * self.size)))
unique = (ind1 < ind2)
ind1 = ind1[unique]
ind2 = ind2[unique]
for (i1, i2) in zip(ind1, ind2):
if (self.state[(i1, 2)] > self.state[(i2, 2)]):
self.state[(i1, 8)] += (self.acc_vert * dt)
self.state[(i2, 8)] -= (self.acc_vert_eff * dt)
else:
self.state[(i1, 8)] -= (self.acc_vert * dt)
self.state[(i2, 8)] += (self.acc_vert_eff * dt)
if (self.obstacles > 0):
DO = np.vstack([self.state[:, :3].copy(), self.obs_state.copy()])
F = squareform(pdist(DO, 'euclidean'))
(d_rone, obs) = np.where((F < (2 * self.obstacles_size)))
unique = ((d_rone < obs) & (obs >= self.drones))
d_rone = d_rone[unique]
obs = obs[unique]
for (d, o) in zip(d_rone, obs):
if ((self.obs_state[((o - self.drones), 2)] < 110) and (self.state[(d, 2)] < self.obs_state[((o - self.drones), 2)])):
self.state[(d, 8)] += (self.acc_vert * dt)
else:
r = (self.state[d, 3:5] - self.state[d, :2])
ro = (self.obs_state[(o - self.drones), :2] - self.state[d, :2])
r_rel = np.cross(r, ro)
if (r_rel > 0):
self.state[(d, 6)] += (self.acc_hor * dt)
self.state[(d, 7)] += (self.acc_hor * dt)
else:
self.state[(d, 6)] -= (self.acc_hor * dt)
self.state[(d, 7)] -= (self.acc_hor * dt)
self.state[:, 6] = np.clip(self.state[:, 6], ((- self.max_speed) + self.wind[0]), (self.max_speed + self.wind[0]))
self.state[:, 7] = np.clip(self.state[:, 7], ((- self.max_speed) + self.wind[1]), (self.max_speed + self.wind[1])) | -7,377,819,459,808,068,000 | step once by dt seconds | drone_2.py | step | SVJayanthi/DroneSimulation | python | def step(self, dt):
self.time_elapsed += dt
D = cdist(self.state[:, :3], self.state[:, 3:6], 'euclidean')
(ind, din) = np.where((D > 122))
uniqua = (ind == din)
ind = ind[uniqua]
for i in ind:
v = self.state[(i, 8)]
v_avg = v
a_ver = self.acc_vert
a_ver_eff = self.acc_vert_eff
height = (self.max_height - self.state[(i, 2)])
print(height)
if (height > 0):
n = 1
if (v > 0):
n = (v / abs(v))
stop = ((n * (v ** 2)) / (2 * a_ver))
t_end = abs((v / a_ver))
b1 = (((v ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((v + ((n * a_ver) * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_ver * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
h = ((2 * t) / (b2 - b1))
area = (n * (t + ((b2 - b1) * h)))
if ((t_end <= dt) and (stop > (height - area))):
v_avg = 0
self.state[(i, 8)] = 0
self.state[(i, 2)] = self.max_height
elif (stop > (height - area)):
t_max = 0
if (stop < height):
a = (2 * (a_ver ** 2))
b = ((4 * a_ver) * v)
c = ((v ** 2) - ((2 * a_ver) * height))
t_max = (((- b) + (((b ** 2) - ((4 * a) * c)) ** 0.5)) / (2 * a))
v_max = (v + (a_ver * (t_max / dt)))
v_end = (((2 * v_max) - v) - (a_ver * dt))
v_avg = ((((v_max + v) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[(i, 8)] = v_end
else:
v_avg = (v + ((a_ver * dt) / 2))
self.state[(i, 8)] += (a_ver * dt)
elif (height < 0):
n = (v / abs(v))
stop = ((n * (v ** 2)) / (2 * a_ver_eff))
t_end = abs((v / a_ver_eff))
b1 = (((v ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((v + ((n * a_ver_eff) * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_ver_eff * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
h = ((2 * t) / (b2 - b1))
area = (n * (t + ((b2 - b1) * h)))
if ((t_end <= dt) and (abs(stop) <= abs(height))):
v_avg = ((v / 2) * (t_end / dt))
self.state[(i, 8)] = (v + (a_ver_eff * t_end))
elif (stop < (height - area)):
v_max = ((height * (2 * a_ver_eff)) ** 0.5)
t_max = ((v_max - v) / a_ver_eff)
v_end = (((2 * v_max) - v) - (a_ver_eff * dt))
v_avg = ((((v_max + v) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[(i, 8)] = v_end
else:
v_avg = (v - ((a_ver_eff * dt) / 2))
self.state[(i, 8)] = (v - (a_ver_eff * dt))
else:
self.state[(i, 8)] += (0 * dt)
self.state[(i, 2)] += (v_avg * dt)
r = (self.state[i, 3:5] - self.state[i, :2])
m = np.linalg.norm(r)
u = (r / m)
a_hor = self.acc_hor
v_hor = self.state[i, 6:8]
h = np.linalg.norm(v_hor)
stop = ((h ** 2) / (2 * a_hor))
t_end = (h / a_hor)
b1 = (((h ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((h + (a_hor * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_hor * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
s = ((2 * t) / (b2 - b1))
area = (t + ((b2 - b1) * s))
if ((t_end <= dt) and (stop < area)):
v_hor = ((h / 2) * (t_end / dt))
self.state[i, 6:8] = ((h - (a_hor * t_end)) * u)
elif (stop > (m - area)):
v_max = ((m * (2 * a_hor)) ** 0.5)
t_max = ((v_max - h) / a_hor)
v_end = (((2 * v_max) - h) - (a_hor * dt))
v_hor = ((((v_max + h) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[i, 6:8] = (v_end * u)
else:
v_hor = (h + ((a_hor * dt) / 2))
self.state[i, 6:8] = ((h + (a_hor * dt)) * u)
self.state[i, :2] += ((v_hor * dt) * u)
(done, fund) = np.where((D <= 122))
uniquo = (done == fund)
done = done[uniquo]
for i in done:
print('here')
v = self.state[(i, 8)]
v_avg = v
a_ver_eff = self.acc_vert_eff
n = (- 1)
if (v < 0):
n = (v / abs(v))
stop = ((n * (v ** 2)) / (2 * a_ver_eff))
t_end = abs((v / a_ver_eff))
b1 = (((v ** 2) + (t_end ** 2)) ** 0.5)
b2 = ((((v + ((n * a_ver_eff) * dt)) ** 2) + ((t_end + dt) ** 2)) ** 0.5)
s1 = ((((a_ver_eff * dt) ** 2) + (dt ** 2)) ** 0.5)
s2 = (dt * 2)
P = (((b2 - b1) + s1) + s2)
t = (((((P / 2) * ((P / 2) - s1)) * ((P / 2) - s2)) * (((P / 2) - b2) + b1)) ** 0.5)
h = ((2 * t) / (b2 - b1))
area = (n * (t + ((b2 - b1) * h)))
if ((t_end <= dt) and (stop > area)):
v_avg = ((v / 2) * (t_end / dt))
self.state[(i, 8)] = (v + (a_ver_eff * t_end))
self.state[(i, 9)] = self.time_elapsed
elif (stop < ((- self.state[(i, 2)]) - area)):
v_max = (((- self.state[(i, 2)]) * (2 * a_ver_eff)) ** 0.5)
t_max = ((v_max - v) / a_ver_eff)
v_end = (((2 * v_max) - v) - (a_ver_eff * dt))
v_avg = ((((v_max + v) / 2) * (t_max / dt)) + (((v_max + v_end) / 2) * ((dt - t_max) / dt)))
self.state[(i, 8)] = v_end
else:
v_avg = (v - ((a_ver_eff * dt) / 2))
self.state[(i, 8)] = (v - (a_ver_eff * dt))
self.state[(i, 2)] += (v_avg * dt)
E = squareform(pdist(self.state[:, :3], 'euclidean'))
(ind1, ind2) = np.where((E < (2 * self.size)))
unique = (ind1 < ind2)
ind1 = ind1[unique]
ind2 = ind2[unique]
for (i1, i2) in zip(ind1, ind2):
if (self.state[(i1, 2)] > self.state[(i2, 2)]):
self.state[(i1, 8)] += (self.acc_vert * dt)
self.state[(i2, 8)] -= (self.acc_vert_eff * dt)
else:
self.state[(i1, 8)] -= (self.acc_vert * dt)
self.state[(i2, 8)] += (self.acc_vert_eff * dt)
if (self.obstacles > 0):
DO = np.vstack([self.state[:, :3].copy(), self.obs_state.copy()])
F = squareform(pdist(DO, 'euclidean'))
(d_rone, obs) = np.where((F < (2 * self.obstacles_size)))
unique = ((d_rone < obs) & (obs >= self.drones))
d_rone = d_rone[unique]
obs = obs[unique]
for (d, o) in zip(d_rone, obs):
if ((self.obs_state[((o - self.drones), 2)] < 110) and (self.state[(d, 2)] < self.obs_state[((o - self.drones), 2)])):
self.state[(d, 8)] += (self.acc_vert * dt)
else:
r = (self.state[d, 3:5] - self.state[d, :2])
ro = (self.obs_state[(o - self.drones), :2] - self.state[d, :2])
r_rel = np.cross(r, ro)
if (r_rel > 0):
self.state[(d, 6)] += (self.acc_hor * dt)
self.state[(d, 7)] += (self.acc_hor * dt)
else:
self.state[(d, 6)] -= (self.acc_hor * dt)
self.state[(d, 7)] -= (self.acc_hor * dt)
self.state[:, 6] = np.clip(self.state[:, 6], ((- self.max_speed) + self.wind[0]), (self.max_speed + self.wind[0]))
self.state[:, 7] = np.clip(self.state[:, 7], ((- self.max_speed) + self.wind[1]), (self.max_speed + self.wind[1])) |
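
The `step` body above recomputes one geometric pattern throughout: it treats `s1`, `s2`, and `b2 - b1` as the sides of a triangle, takes the semi-perimeter `P / 2`, and applies Heron's formula to get the triangle area `t` and the height `h` over the side `b2 - b1`. A small sketch of that repeated pattern as a standalone helper (the factoring and name are illustrative, not from the repository):

```python
def heron_area_and_height(a, b, base):
    """Area of the triangle with sides a, b, base (Heron's formula),
    and its height measured over the side `base`."""
    s = (a + b + base) / 2  # semi-perimeter, written as P / 2 in step()
    area = (s * (s - a) * (s - b) * (s - base)) ** 0.5
    height = 2 * area / base
    return area, height

# heron_area_and_height(3, 4, 5) -> (6.0, 2.4)
```
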
def __init__(self, taxonomy, logfile=None, verbose=True, debug=False):
'\n @param taxonomy: taxonomy handler\n @type taxonomy: NcbiTaxonomy\n @param logfile: file handler or file path to a log file\n @type logfile: file | FileIO | StringIO | str\n @param verbose: Not verbose means that only warnings and errors will be passed to stream\n @type verbose: bool\n @param debug: Display debug messages\n @type debug: bool\n '
super(TaxonomicProfile, self).__init__(label='TaxonomicProfile', logfile=logfile, verbose=verbose, debug=debug)
self._ranks = ['superkingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species', 'strain']
assert isinstance(taxonomy, NcbiTaxonomy)
self._taxonomy = taxonomy
self._filename_taxonomic_profile = 'taxonomic_profile_{sample_index}.txt' | -591,944,233,567,744,300 | @param taxonomy: taxonomy handler
@type taxonomy: NcbiTaxonomy
@param logfile: file handler or file path to a log file
@type logfile: file | FileIO | StringIO | str
@param verbose: Not verbose means that only warnings and errors will be passed to stream
@type verbose: bool
@param debug: Display debug messages
@type debug: bool | scripts/ComunityDesign/taxonomicprofile.py | __init__ | alienzj/CAMISIM | python | def __init__(self, taxonomy, logfile=None, verbose=True, debug=False):
'\n @param taxonomy: taxonomy handler\n @type taxonomy: NcbiTaxonomy\n @param logfile: file handler or file path to a log file\n @type logfile: file | FileIO | StringIO | str\n @param verbose: Not verbose means that only warnings and errors will be passed to stream\n @type verbose: bool\n @param debug: Display debug messages\n @type debug: bool\n '
super(TaxonomicProfile, self).__init__(label='TaxonomicProfile', logfile=logfile, verbose=verbose, debug=debug)
self._ranks = ['superkingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species', 'strain']
assert isinstance(taxonomy, NcbiTaxonomy)
self._taxonomy = taxonomy
self._filename_taxonomic_profile = 'taxonomic_profile_{sample_index}.txt' |
def write_taxonomic_profile_from_abundance_files(self, metadata_table, list_of_file_paths, directory_output, sample_id=''):
'\n Write a taxonomic profile file for each relative abundance file\n\n @param metadata_table: Contains metadata of all communities\n @type metadata_table: MetadataTable\n @param list_of_file_paths: List of abundance file paths\n @type list_of_file_paths: list[str | unicode]\n @param directory_output: Profiles are written in this directory\n @type directory_output: str | unicode\n @param sample_id: Identifier of a sample\n @type sample_id: str | unicode\n '
metadata_table_tmp = MetadataTable(logfile=self._logfile, verbose=self._verbose)
for (index_abundance, file_path) in enumerate(list_of_file_paths):
community_abundance = metadata_table_tmp.parse_file(file_path, column_names=False)
file_path_output = os.path.join(directory_output, self._filename_taxonomic_profile.format(sample_index=index_abundance))
with open(file_path_output, 'w') as stream_output:
self.write_taxonomic_profile(community_abundance, stream_output, metadata_table, sample_id) | -3,255,416,599,153,192,000 | Write a taxonomic profile file for each relative abundance file
@param metadata_table: Contains metadata of all communities
@type metadata_table: MetadataTable
@param list_of_file_paths: List of abundance file paths
@type list_of_file_paths: list[str | unicode]
@param directory_output: Profiles are written in this directory
@type directory_output: str | unicode
@param sample_id: Identifier of a sample
@type sample_id: str | unicode | scripts/ComunityDesign/taxonomicprofile.py | write_taxonomic_profile_from_abundance_files | alienzj/CAMISIM | python | def write_taxonomic_profile_from_abundance_files(self, metadata_table, list_of_file_paths, directory_output, sample_id=):
'\n Write a taxonomic profile file for each relative abundance file\n\n @param metadata_table: Contains metadata of all communities\n @type metadata_table: MetadataTable\n @param list_of_file_paths: List of abundance file paths\n @type list_of_file_paths: list[str | unicode]\n @param directory_output: Profiles are written in this directory\n @type directory_output: str | unicode\n @param sample_id: Identifier of a sample\n @type sample_id: str | unicode\n '
metadata_table_tmp = MetadataTable(logfile=self._logfile, verbose=self._verbose)
for (index_abundance, file_path) in enumerate(list_of_file_paths):
community_abundance = metadata_table_tmp.parse_file(file_path, column_names=False)
file_path_output = os.path.join(directory_output, self._filename_taxonomic_profile.format(sample_index=index_abundance))
with open(file_path_output, 'w') as stream_output:
self.write_taxonomic_profile(community_abundance, stream_output, metadata_table, sample_id) |
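
A hedged usage sketch of the method above; the taxonomy and metadata objects are assumed to be constructed elsewhere, and the abundance file names are hypothetical. Per the constructor, each output file is named `taxonomic_profile_<index>.txt`:

```python
profiler = TaxonomicProfile(taxonomy)  # taxonomy: an NcbiTaxonomy instance
profiler.write_taxonomic_profile_from_abundance_files(
    metadata_table=metadata_table,
    list_of_file_paths=['community_0_abundance.tsv', 'community_1_abundance.tsv'],
    directory_output='out/profiles',
    sample_id='sample_42',
)
# -> out/profiles/taxonomic_profile_0.txt, out/profiles/taxonomic_profile_1.txt
```
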
def write_taxonomic_profile(self, community_abundance, stream_output, metadata_table, sample_id=''):
'\n Stream a taxonomic profile by list of relative abundances\n\n @param community_abundance: list of relative abundances\n @type community_abundance: generator[ dict[int|long|str|unicode, str|unicode] ]\n @param stream_output: Output of taxonomic profile\n @type stream_output: file | FileIO | StringIO\n @param metadata_table: Contains metadata of all communities\n @type metadata_table: MetadataTable\n @param sample_id: Identifier of a sample\n @type sample_id: str | unicode\n '
assert isinstance(metadata_table, MetadataTable)
genome_abundance = {}
total_abundance = 0.0
for (genome_id, abundance) in community_abundance:
if (genome_id in genome_abundance):
raise IOError("genome id '{}' is not unique!".format(genome_id))
genome_abundance[genome_id] = float(abundance)
total_abundance += genome_abundance[genome_id]
for (key, value) in genome_abundance.items():
genome_abundance[key] = (value / total_abundance)
self._stream_taxonomic_profile(stream_output, genome_abundance, metadata_table, sample_id) | 8,573,800,812,221,487,000 | Stream a taxonomic profile by list of relative abundances
@param community_abundance: list of relative abundances
@type community_abundance: generator[ dict[int|long|str|unicode, str|unicode] ]
@param stream_output: Output of taxonomic profile
@type stream_output: file | FileIO | StringIO
@param metadata_table: Contains metadata of all communities
@type metadata_table: MetadataTable
@param sample_id: Identifier of a sample
@type sample_id: str | unicode | scripts/ComunityDesign/taxonomicprofile.py | write_taxonomic_profile | alienzj/CAMISIM | python | def write_taxonomic_profile(self, community_abundance, stream_output, metadata_table, sample_id=):
'\n Stream a taxonomic profile by list of relative abundances\n\n @param community_abundance: list of relative abundances\n @type community_abundance: generator[ dict[int|long|str|unicode, str|unicode] ]\n @param stream_output: Output of taxonomic profile\n @type stream_output: file | FileIO | StringIO\n @param metadata_table: Contains metadata of all communities\n @type metadata_table: MetadataTable\n @param sample_id: Identifier of a sample\n @type sample_id: str | unicode\n '
assert isinstance(metadata_table, MetadataTable)
genome_abundance = {}
total_abundance = 0.0
for (genome_id, abundance) in community_abundance:
if (genome_id in genome_abundance):
raise IOError("genome id '{}' is not unique!".format(genome_id))
genome_abundance[genome_id] = float(abundance)
total_abundance += genome_abundance[genome_id]
for (key, value) in genome_abundance.items():
genome_abundance[key] = (value / total_abundance)
self._stream_taxonomic_profile(stream_output, genome_abundance, metadata_table, sample_id) |
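
Before streaming, `write_taxonomic_profile` normalizes the parsed abundances so they sum to one; a toy illustration with made-up genome ids:

```python
genome_abundance = {'g1': 2.0, 'g2': 3.0}
total_abundance = sum(genome_abundance.values())  # 5.0
genome_abundance = {k: v / total_abundance for k, v in genome_abundance.items()}
# {'g1': 0.4, 'g2': 0.6}
```
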
def _stream_taxonomic_profile(self, stream_output, genome_id_to_percent, metadata_table, sample_id=''):
'\n Stream a taxonomic profile by list of percentages by genome id\n\n @param stream_output: Output of taxonomic profile\n @type stream_output: file | FileIO | StringIO\n @param genome_id_to_percent: Percentage for each genome id\n @type genome_id_to_percent: dict[str|unicode, float]\n @param metadata_table: Contains metadata of all communities\n @type metadata_table: MetadataTable\n @param sample_id: Identifier of a sample\n @type sample_id: str | unicode\n '
strain_id_to_genome_id = {}
genome_id_to_strain_id = {}
genome_id_to_taxid = metadata_table.get_map(key_column_name='genome_ID', value_column_name='NCBI_ID')
genome_id_to_otu = metadata_table.get_map(key_column_name='genome_ID', value_column_name='OTU')
column_genome_id = metadata_table.get_column('genome_ID')
if (not metadata_table.has_column('strain_id')):
column_strain_id = metadata_table.get_empty_column()
else:
column_strain_id = metadata_table.get_column('strain_id')
genome_id_to_strain_id = metadata_table.get_map(key_column_name='genome_ID', value_column_name='strain_id')
genome_id_to_lineage = self._get_genome_id_to_lineage(genome_id_to_percent.keys(), genome_id_to_taxid, strain_id_to_genome_id, genome_id_to_strain_id)
percent_by_rank_by_taxid = self._get_percent_by_rank_by_taxid(genome_id_to_lineage, genome_id_to_percent)
self._stream_tp_header(stream_output, sample_id)
self._stream_tp_rows(stream_output, percent_by_rank_by_taxid, strain_id_to_genome_id, genome_id_to_otu) | -2,194,851,972,731,540,000 | Stream a taxonomic profile by list of percentages by genome id
@param stream_output: Output of taxonomic profile
@type stream_output: file | FileIO | StringIO
@param genome_id_to_percent: Percentage for each genome id
@type genome_id_to_percent: dict[str|unicode, float]
@param metadata_table: Contains metadata of all communities
@type metadata_table: MetadataTable
@param sample_id: Identifier of a sample
@type sample_id: str | unicode | scripts/ComunityDesign/taxonomicprofile.py | _stream_taxonomic_profile | alienzj/CAMISIM | python | def _stream_taxonomic_profile(self, stream_output, genome_id_to_percent, metadata_table, sample_id=):
'\n Stream a taxonomic profile by list of percentages by genome id\n\n @param stream_output: Output of taxonomic profile\n @type stream_output: file | FileIO | StringIO\n @param genome_id_to_percent: Percentage for each genome id\n @type genome_id_to_percent: dict[str|unicode, float]\n @param metadata_table: Contains metadata of all communities\n @type metadata_table: MetadataTable\n @param sample_id: Identifier of a sample\n @type sample_id: str | unicode\n '
strain_id_to_genome_id = {}
genome_id_to_strain_id = {}
genome_id_to_taxid = metadata_table.get_map(key_column_name='genome_ID', value_column_name='NCBI_ID')
genome_id_to_otu = metadata_table.get_map(key_column_name='genome_ID', value_column_name='OTU')
column_genome_id = metadata_table.get_column('genome_ID')
if (not metadata_table.has_column('strain_id')):
column_strain_id = metadata_table.get_empty_column()
else:
column_strain_id = metadata_table.get_column('strain_id')
genome_id_to_strain_id = metadata_table.get_map(key_column_name='genome_ID', value_column_name='strain_id')
genome_id_to_lineage = self._get_genome_id_to_lineage(genome_id_to_percent.keys(), genome_id_to_taxid, strain_id_to_genome_id, genome_id_to_strain_id)
percent_by_rank_by_taxid = self._get_percent_by_rank_by_taxid(genome_id_to_lineage, genome_id_to_percent)
self._stream_tp_header(stream_output, sample_id)
self._stream_tp_rows(stream_output, percent_by_rank_by_taxid, strain_id_to_genome_id, genome_id_to_otu) |
def _get_genome_id_to_lineage(self, list_of_genome_id, genome_id_to_taxid, strain_id_to_genome_id, genome_id_to_strain_id):
'\n Returns the lineage for each genome id, assigning new strain id if not available\n\n @param list_of_genome_id: List of identifier of genomes\n @type list_of_genome_id: list[str|unicode]\n @param genome_id_to_taxid: Assigned taxid for each genome id\n @type genome_id_to_taxid: dict[str|unicode, str|unicode]\n @param strain_id_to_genome_id: Mapping from strain id to genome id\n @type strain_id_to_genome_id: dict[str|unicode, str|unicode]\n @param genome_id_to_strain_id: Mapping from genome id to strain id\n @type genome_id_to_strain_id: dict[str|unicode, str|unicode]\n\n @return: lineage for each genome id using genome id as key\n @rtype: dict[str|unicode, list[None|str|unicode]]\n '
strains_by_taxid = {}
genome_id_to_lineage = {}
for genome_id in list_of_genome_id:
tax_id = genome_id_to_taxid[genome_id]
if (tax_id == ''):
raise KeyError("genome_ID '{}' has no taxid!".format(genome_id))
tax_id = self._taxonomy.get_updated_taxid(tax_id)
genome_id_to_lineage[genome_id] = self._taxonomy.get_lineage_of_legal_ranks(tax_id, ranks=self._ranks, default_value=None)
if (genome_id_to_lineage[genome_id][(- 1)] is not None):
continue
if (tax_id not in strains_by_taxid):
strains_by_taxid[tax_id] = 0
strains_by_taxid[tax_id] += 1
if ((genome_id in genome_id_to_strain_id) and genome_id_to_strain_id[genome_id]):
strain_id = genome_id_to_strain_id[genome_id]
else:
strain_id = '{}.{}'.format(tax_id, strains_by_taxid[tax_id])
while (strain_id in genome_id_to_strain_id.values()):
strains_by_taxid[tax_id] += 1
strain_id = '{}.{}'.format(tax_id, strains_by_taxid[tax_id])
genome_id_to_strain_id[genome_id] = strain_id
genome_id_to_lineage[genome_id][(- 1)] = strain_id
strain_id_to_genome_id[strain_id] = genome_id
return genome_id_to_lineage | 544,826,354,776,588,500 | Returns the lineage for each genome id, assigning new strain id if not available
@param list_of_genome_id: List of identifier of genomes
@type list_of_genome_id: list[str|unicode]
@param genome_id_to_taxid: Assigned taxid for each genome id
@type genome_id_to_taxid: dict[str|unicode, str|unicode]
@param strain_id_to_genome_id: Mapping from strain id to genome id
@type strain_id_to_genome_id: dict[str|unicode, str|unicode]
@param genome_id_to_strain_id: Mapping from genome id to strain id
@type genome_id_to_strain_id: dict[str|unicode, str|unicode]
@return: lineage for each genome id using genome id as key
@rtype: dict[str|unicode, list[None|str|unicode]] | scripts/ComunityDesign/taxonomicprofile.py | _get_genome_id_to_lineage | alienzj/CAMISIM | python | def _get_genome_id_to_lineage(self, list_of_genome_id, genome_id_to_taxid, strain_id_to_genome_id, genome_id_to_strain_id):
'\n Returns the lineage for each genome id, assigning new strain id if not available\n\n @param list_of_genome_id: List of identifier of genomes\n @type list_of_genome_id: list[str|unicode]\n @param genome_id_to_taxid: Assigned taxid for each genome id\n @type genome_id_to_taxid: dict[str|unicode, str|unicode]\n @param strain_id_to_genome_id: Mapping from strain id to genome id\n @type strain_id_to_genome_id: dict[str|unicode, str|unicode]\n @param genome_id_to_strain_id: Mapping from genome id to strain id\n @type genome_id_to_strain_id: dict[str|unicode, str|unicode]\n\n @return: lineage for each genome id using genome id as key\n @rtype: dict[str|unicode, list[None|str|unicode]]\n '
strains_by_taxid = {}
genome_id_to_lineage = {}
for genome_id in list_of_genome_id:
tax_id = genome_id_to_taxid[genome_id]
if (tax_id == ''):
raise KeyError("genome_ID '{}' has no taxid!".format(genome_id))
tax_id = self._taxonomy.get_updated_taxid(tax_id)
genome_id_to_lineage[genome_id] = self._taxonomy.get_lineage_of_legal_ranks(tax_id, ranks=self._ranks, default_value=None)
if (genome_id_to_lineage[genome_id][(- 1)] is not None):
continue
if (tax_id not in strains_by_taxid):
strains_by_taxid[tax_id] = 0
strains_by_taxid[tax_id] += 1
if ((genome_id in genome_id_to_strain_id) and genome_id_to_strain_id[genome_id]):
strain_id = genome_id_to_strain_id[genome_id]
else:
strain_id = '{}.{}'.format(tax_id, strains_by_taxid[tax_id])
while (strain_id in genome_id_to_strain_id.values()):
strains_by_taxid[tax_id] += 1
strain_id = '{}.{}'.format(tax_id, strains_by_taxid[tax_id])
genome_id_to_strain_id[genome_id] = strain_id
genome_id_to_lineage[genome_id][(- 1)] = strain_id
strain_id_to_genome_id[strain_id] = genome_id
return genome_id_to_lineage |
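
When a genome lacks a strain-level taxid, the method above mints one as `'<taxid>.<counter>'` and bumps the counter until the id is unused; a toy illustration of that scheme (not library code):

```python
def mint_strain_id(tax_id, taken, counter):
    """Return the first unused '<taxid>.<n>' id at or after `counter`."""
    strain_id = '{}.{}'.format(tax_id, counter)
    while strain_id in taken:
        counter += 1
        strain_id = '{}.{}'.format(tax_id, counter)
    return strain_id, counter

# mint_strain_id('562', {'562.1'}, 1) -> ('562.2', 2)
```
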
def _get_percent_by_rank_by_taxid(self, genome_id_to_lineage, genome_id_to_percent):
'\n Return the percentage for each taxid of a list of default ranks\n\n @param genome_id_to_lineage: Mapping from genome id to a lineage (list)\n @type genome_id_to_lineage: dict[str|unicode, list[None|str|unicode]]\n @param genome_id_to_percent: Mapping from genome id to percentage\n @type genome_id_to_percent: dict[str|unicode, float]\n\n @return: Percentage for each taxid of a list of default ranks as dictionary of dictionaries\n @rtype: dict[str|unicode, dict[str|unicode, float]]\n '
percent_by_rank_by_taxid = {}
for rank in self._ranks:
percent_by_rank_by_taxid[rank] = dict()
for (rank_index, rank) in enumerate(self._ranks):
for genome_id in genome_id_to_lineage:
tax_id = genome_id_to_lineage[genome_id][rank_index]
if (tax_id is None):
continue
percent = genome_id_to_percent[genome_id]
if (tax_id not in percent_by_rank_by_taxid[rank]):
percent_by_rank_by_taxid[rank][tax_id] = 0
percent_by_rank_by_taxid[rank][tax_id] += percent
return percent_by_rank_by_taxid | -4,990,005,371,244,257,000 | Return the percentage for each taxid of a list of default ranks
@param genome_id_to_lineage: Mapping from genome id to a lineage (list)
@type genome_id_to_lineage: dict[str|unicode, list[None|str|unicode]]
@param genome_id_to_percent: Mapping from genome id to percentage
@type genome_id_to_percent: dict[str|unicode, float]
@return: Percentage for each taxid of a list of default ranks as dictionary of dictionaries
@rtype: dict[str|unicode, dict[str|unicode, float]] | scripts/ComunityDesign/taxonomicprofile.py | _get_percent_by_rank_by_taxid | alienzj/CAMISIM | python | def _get_percent_by_rank_by_taxid(self, genome_id_to_lineage, genome_id_to_percent):
'\n Return the percentage for each taxid of a list of default ranks\n\n @param genome_id_to_lineage: Mapping from genome id to a lineage (list)\n @type genome_id_to_lineage: dict[str|unicode, list[None|str|unicode]]\n @param genome_id_to_percent: Mapping from genome id to percentage\n @type genome_id_to_percent: dict[str|unicode, float]\n\n @return: Percentage for each taxid of a list of default ranks as dictionary of dictionaries\n @rtype: dict[str|unicode, dict[str|unicode, float]]\n '
percent_by_rank_by_taxid = {}
for rank in self._ranks:
percent_by_rank_by_taxid[rank] = dict()
for (rank_index, rank) in enumerate(self._ranks):
for genome_id in genome_id_to_lineage:
tax_id = genome_id_to_lineage[genome_id][rank_index]
if (tax_id is None):
continue
percent = genome_id_to_percent[genome_id]
if (tax_id not in percent_by_rank_by_taxid[rank]):
percent_by_rank_by_taxid[rank][tax_id] = 0
percent_by_rank_by_taxid[rank][tax_id] += percent
return percent_by_rank_by_taxid |
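
The return value is a dict of dicts keyed first by rank and then by taxid, with sibling genomes' percentages summed at every shared rank; schematically (toy taxids and numbers):

```python
# Two strains of the same species merge at the species rank:
percent_by_rank_by_taxid = {
    'species': {'562': 0.7},                 # 0.4 + 0.3
    'strain': {'562.1': 0.4, '562.2': 0.3},
}
```
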
def _stream_tp_rows(self, stream_output, percent_by_rank_by_taxid, strain_id_to_genome_id, genome_id_to_otu):
'\n Stream the rows of the taxonomic profile.\n\n @param stream_output: Output of taxonomic profile\n @type stream_output: file | FileIO | StringIO\n @param percent_by_rank_by_taxid: Percentage for each taxid of a list of default ranks as dictionary of dictionaries\n @type percent_by_rank_by_taxid: dict[str|unicode, dict[str|unicode, float]]\n @param strain_id_to_genome_id: Map from strain id to a genome identifier\n @type strain_id_to_genome_id: dict[str|unicode, str|unicode]\n @param genome_id_to_otu: Map from genome id to an otu identifier\n @type genome_id_to_otu: dict[str|unicode, str|unicode]\n '
row_format = '{taxid}\t{rank}\t{taxpath}\t{taxpath_sn}\t{abp:.4f}\t{gid}\t{otu}\n'
for (rank_index, rank) in enumerate(self._ranks):
for tax_id in percent_by_rank_by_taxid[rank]:
if (tax_id == ''):
self._logger.warning(('Missing rank %s for a genome' % rank))
continue
if ('.' in tax_id):
genome_id = strain_id_to_genome_id[tax_id]
otu = genome_id_to_otu[genome_id]
lineage = self._taxonomy.get_lineage_of_legal_ranks(tax_id.split('.')[0], ranks=self._ranks, default_value='')
lineage[(- 1)] = tax_id
else:
genome_id = ''
otu = ''
lineage = self._taxonomy.get_lineage_of_legal_ranks(tax_id, ranks=self._ranks, default_value='')
lineage = lineage[:(rank_index + 1)]
lineage_sn = [(self._taxonomy.get_scientific_name(tid) if ((tid != '') and ('.' not in tid)) else '') for tid in lineage]
if ('.' in tax_id):
lineage_sn[(- 1)] = (self._taxonomy.get_scientific_name(tax_id.split('.')[0]) + ' strain')
if (percent_by_rank_by_taxid[rank][tax_id] != 0):
stream_output.write(row_format.format(taxid=tax_id, rank=rank, taxpath='|'.join(lineage), taxpath_sn='|'.join(lineage_sn), abp=(percent_by_rank_by_taxid[rank][tax_id] * 100), gid=genome_id, otu=otu)) | 4,757,035,080,381,341,000 | Stream the rows of the taxonomic profile.
@param stream_output: Output of taxonomic profile
@type stream_output: file | FileIO | StringIO
@param percent_by_rank_by_taxid: Percentage for each taxid of a list of default ranks as dictionary of dictionaries
@type percent_by_rank_by_taxid: dict[str|unicode, dict[str|unicode, float]]
@param strain_id_to_genome_id: Map from strain id to a genome identifier
@type strain_id_to_genome_id: dict[str|unicode, str|unicode]
@param genome_id_to_otu: Map from genome id to an otu identifier
@type genome_id_to_otu: dict[str|unicode, str|unicode] | scripts/ComunityDesign/taxonomicprofile.py | _stream_tp_rows | alienzj/CAMISIM | python | def _stream_tp_rows(self, stream_output, percent_by_rank_by_taxid, strain_id_to_genome_id, genome_id_to_otu):
'\n Stream the rows of the taxonomic profile.\n\n @param stream_output: Output of taxonomic profile\n @type stream_output: file | FileIO | StringIO\n @param percent_by_rank_by_taxid: Percentage for each taxid of a list of default ranks as dictionary of dictionaries\n @type percent_by_rank_by_taxid: dict[str|unicode, dict[str|unicode, float]]\n @param strain_id_to_genome_id: Map from strain id to a genome identifier\n @type strain_id_to_genome_id: dict[str|unicode, str|unicode]\n @param genome_id_to_otu: Map from genome id to an otu identifier\n @type genome_id_to_otu: dict[str|unicode, str|unicode]\n '
row_format = '{taxid}\t{rank}\t{taxpath}\t{taxpath_sn}\t{abp:.4f}\t{gid}\t{otu}\n'
for (rank_index, rank) in enumerate(self._ranks):
for tax_id in percent_by_rank_by_taxid[rank]:
if (tax_id == ''):
self._logger.warning(('Missing rank %s for a genome' % rank))
continue
if ('.' in tax_id):
genome_id = strain_id_to_genome_id[tax_id]
otu = genome_id_to_otu[genome_id]
lineage = self._taxonomy.get_lineage_of_legal_ranks(tax_id.split('.')[0], ranks=self._ranks, default_value='')
lineage[(- 1)] = tax_id
else:
genome_id = ''
otu = ''
lineage = self._taxonomy.get_lineage_of_legal_ranks(tax_id, ranks=self._ranks, default_value='')
lineage = lineage[:(rank_index + 1)]
lineage_sn = [(self._taxonomy.get_scientific_name(tid) if ((tid != '') and ('.' not in tid)) else '') for tid in lineage]
if ('.' in tax_id):
lineage_sn[(- 1)] = (self._taxonomy.get_scientific_name(tax_id.split('.')[0]) + ' strain')
if (percent_by_rank_by_taxid[rank][tax_id] != 0):
stream_output.write(row_format.format(taxid=tax_id, rank=rank, taxpath='|'.join(lineage), taxpath_sn='|'.join(lineage_sn), abp=(percent_by_rank_by_taxid[rank][tax_id] * 100), gid=genome_id, otu=otu)) |
def _stream_tp_header(self, output_stream, identifier):
'\n Stream the header of the taxonomic profile.\n\n @param output_stream: Output of taxonomic profile\n @type output_stream: file | FileIO | StringIO\n @param identifier: Identifier of a sample\n @type identifier: str | unicode\n '
output_stream.write('@SampleID:{}\n'.format(identifier))
output_stream.write('@Version:{}\n'.format(self._taxonomic_profile_version))
output_stream.write('@Ranks:{ranks}\n\n'.format(ranks='|'.join(self._ranks)))
output_stream.write('@@TAXID\tRANK\tTAXPATH\tTAXPATHSN\tPERCENTAGE\t_CAMI_genomeID\t_CAMI_OTU\n') | 1,661,015,973,845,467,100 | Stream the header of the taxonomic profile.
@param output_stream: Output of taxonomic profile
@type output_stream: file | FileIO | StringIO
@param identifier: Identifier of a sample
@type identifier: str | unicode | scripts/ComunityDesign/taxonomicprofile.py | _stream_tp_header | alienzj/CAMISIM | python | def _stream_tp_header(self, output_stream, identifier):
'\n Stream the header of the taxonomic profile.\n\n @param output_stream: Output of taxonomic profile\n @type output_stream: file | FileIO | StringIO\n @param identifier: Identifier of a sample\n @type identifier: str | unicode\n '
output_stream.write('@SampleID:{}\n'.format(identifier))
output_stream.write('@Version:{}\n'.format(self._taxonomic_profile_version))
output_stream.write('@Ranks:{ranks}\n\n'.format(ranks='|'.join(self._ranks)))
output_stream.write('@@TAXID\tRANK\tTAXPATH\tTAXPATHSN\tPERCENTAGE\t_CAMI_genomeID\t_CAMI_OTU\n') |
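
Rendered, the header written by `_stream_tp_header` looks like this (the sample id and version value are placeholders; `_taxonomic_profile_version` is defined outside this excerpt, and the final row is tab-separated):

```
@SampleID:sample_42
@Version:<taxonomic_profile_version>
@Ranks:superkingdom|phylum|class|order|family|genus|species|strain

@@TAXID	RANK	TAXPATH	TAXPATHSN	PERCENTAGE	_CAMI_genomeID	_CAMI_OTU
```
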
def setup_platform(hass, config, add_devices, discovery_info=None):
'Set up the Bose Soundtouch platform.'
if (DATA_SOUNDTOUCH not in hass.data):
hass.data[DATA_SOUNDTOUCH] = []
if discovery_info:
host = discovery_info['host']
port = int(discovery_info['port'])
if (host in [device.config['host'] for device in hass.data[DATA_SOUNDTOUCH]]):
return
remote_config = {'id': 'ha.component.soundtouch', 'host': host, 'port': port}
soundtouch_device = SoundTouchDevice(None, remote_config)
hass.data[DATA_SOUNDTOUCH].append(soundtouch_device)
add_devices([soundtouch_device])
else:
name = config.get(CONF_NAME)
remote_config = {'id': 'ha.component.soundtouch', 'port': config.get(CONF_PORT), 'host': config.get(CONF_HOST)}
soundtouch_device = SoundTouchDevice(name, remote_config)
hass.data[DATA_SOUNDTOUCH].append(soundtouch_device)
add_devices([soundtouch_device])
def service_handle(service):
'Handle the applying of a service.'
master_device_id = service.data.get('master')
slaves_ids = service.data.get('slaves')
slaves = []
if slaves_ids:
slaves = [device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id in slaves_ids)]
master = next((device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id == master_device_id)), None)
if (master is None):
_LOGGER.warning('Unable to find master with entity_id: %s', str(master_device_id))
return
if (service.service == SERVICE_PLAY_EVERYWHERE):
slaves = [d for d in hass.data[DATA_SOUNDTOUCH] if (d.entity_id != master_device_id)]
master.create_zone(slaves)
elif (service.service == SERVICE_CREATE_ZONE):
master.create_zone(slaves)
elif (service.service == SERVICE_REMOVE_ZONE_SLAVE):
master.remove_zone_slave(slaves)
elif (service.service == SERVICE_ADD_ZONE_SLAVE):
master.add_zone_slave(slaves)
hass.services.register(DOMAIN, SERVICE_PLAY_EVERYWHERE, service_handle, schema=SOUNDTOUCH_PLAY_EVERYWHERE)
hass.services.register(DOMAIN, SERVICE_CREATE_ZONE, service_handle, schema=SOUNDTOUCH_CREATE_ZONE_SCHEMA)
hass.services.register(DOMAIN, SERVICE_REMOVE_ZONE_SLAVE, service_handle, schema=SOUNDTOUCH_REMOVE_ZONE_SCHEMA)
hass.services.register(DOMAIN, SERVICE_ADD_ZONE_SLAVE, service_handle, schema=SOUNDTOUCH_ADD_ZONE_SCHEMA) | -7,490,018,582,763,252,000 | Set up the Bose Soundtouch platform. | homeassistant/components/media_player/soundtouch.py | setup_platform | Anthonymcqueen21/home-assistant | python | def setup_platform(hass, config, add_devices, discovery_info=None):
if (DATA_SOUNDTOUCH not in hass.data):
hass.data[DATA_SOUNDTOUCH] = []
if discovery_info:
host = discovery_info['host']
port = int(discovery_info['port'])
if (host in [device.config['host'] for device in hass.data[DATA_SOUNDTOUCH]]):
return
remote_config = {'id': 'ha.component.soundtouch', 'host': host, 'port': port}
soundtouch_device = SoundTouchDevice(None, remote_config)
hass.data[DATA_SOUNDTOUCH].append(soundtouch_device)
add_devices([soundtouch_device])
else:
name = config.get(CONF_NAME)
remote_config = {'id': 'ha.component.soundtouch', 'port': config.get(CONF_PORT), 'host': config.get(CONF_HOST)}
soundtouch_device = SoundTouchDevice(name, remote_config)
hass.data[DATA_SOUNDTOUCH].append(soundtouch_device)
add_devices([soundtouch_device])
def service_handle(service):
'Handle the applying of a service.'
master_device_id = service.data.get('master')
slaves_ids = service.data.get('slaves')
slaves = []
if slaves_ids:
slaves = [device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id in slaves_ids)]
master = next((device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id == master_device_id)), None)
if (master is None):
_LOGGER.warning('Unable to find master with entity_id: %s', str(master_device_id))
return
if (service.service == SERVICE_PLAY_EVERYWHERE):
slaves = [d for d in hass.data[DATA_SOUNDTOUCH] if (d.entity_id != master_device_id)]
master.create_zone(slaves)
elif (service.service == SERVICE_CREATE_ZONE):
master.create_zone(slaves)
elif (service.service == SERVICE_REMOVE_ZONE_SLAVE):
master.remove_zone_slave(slaves)
elif (service.service == SERVICE_ADD_ZONE_SLAVE):
master.add_zone_slave(slaves)
hass.services.register(DOMAIN, SERVICE_PLAY_EVERYWHERE, service_handle, schema=SOUNDTOUCH_PLAY_EVERYWHERE)
hass.services.register(DOMAIN, SERVICE_CREATE_ZONE, service_handle, schema=SOUNDTOUCH_CREATE_ZONE_SCHEMA)
hass.services.register(DOMAIN, SERVICE_REMOVE_ZONE_SLAVE, service_handle, schema=SOUNDTOUCH_REMOVE_ZONE_SCHEMA)
hass.services.register(DOMAIN, SERVICE_ADD_ZONE_SLAVE, service_handle, schema=SOUNDTOUCH_ADD_ZONE_SCHEMA) |
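
The four zone services registered above all take the same shape of service data: a master entity id and, except for play-everywhere, a list of slave entity ids. A hedged example (the entity ids are hypothetical):

```python
service_data = {
    'master': 'media_player.living_room_soundtouch',
    'slaves': ['media_player.kitchen_soundtouch', 'media_player.bedroom_soundtouch'],
}
# e.g. hass.services.call(DOMAIN, SERVICE_CREATE_ZONE, service_data)
```
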
def service_handle(service):
'Handle the applying of a service.'
master_device_id = service.data.get('master')
slaves_ids = service.data.get('slaves')
slaves = []
if slaves_ids:
slaves = [device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id in slaves_ids)]
master = next((device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id == master_device_id)), None)
if (master is None):
_LOGGER.warning('Unable to find master with entity_id: %s', str(master_device_id))
return
if (service.service == SERVICE_PLAY_EVERYWHERE):
slaves = [d for d in hass.data[DATA_SOUNDTOUCH] if (d.entity_id != master_device_id)]
master.create_zone(slaves)
elif (service.service == SERVICE_CREATE_ZONE):
master.create_zone(slaves)
elif (service.service == SERVICE_REMOVE_ZONE_SLAVE):
master.remove_zone_slave(slaves)
elif (service.service == SERVICE_ADD_ZONE_SLAVE):
master.add_zone_slave(slaves) | -1,882,622,903,427,059,200 | Handle the applying of a service. | homeassistant/components/media_player/soundtouch.py | service_handle | Anthonymcqueen21/home-assistant | python | def service_handle(service):
master_device_id = service.data.get('master')
slaves_ids = service.data.get('slaves')
slaves = []
if slaves_ids:
slaves = [device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id in slaves_ids)]
master = next((device for device in hass.data[DATA_SOUNDTOUCH] if (device.entity_id == master_device_id)), None)
if (master is None):
_LOGGER.warning('Unable to find master with entity_id: %s', str(master_device_id))
return
if (service.service == SERVICE_PLAY_EVERYWHERE):
slaves = [d for d in hass.data[DATA_SOUNDTOUCH] if (d.entity_id != master_device_id)]
master.create_zone(slaves)
elif (service.service == SERVICE_CREATE_ZONE):
master.create_zone(slaves)
elif (service.service == SERVICE_REMOVE_ZONE_SLAVE):
master.remove_zone_slave(slaves)
elif (service.service == SERVICE_ADD_ZONE_SLAVE):
master.add_zone_slave(slaves) |
def __init__(self, name, config):
'Create Soundtouch Entity.'
from libsoundtouch import soundtouch_device
self._device = soundtouch_device(config['host'], config['port'])
if (name is None):
self._name = self._device.config.name
else:
self._name = name
self._status = self._device.status()
self._volume = self._device.volume()
self._config = config | 5,014,976,567,706,292,000 | Create Soundtouch Entity. | homeassistant/components/media_player/soundtouch.py | __init__ | Anthonymcqueen21/home-assistant | python | def __init__(self, name, config):
from libsoundtouch import soundtouch_device
self._device = soundtouch_device(config['host'], config['port'])
if (name is None):
self._name = self._device.config.name
else:
self._name = name
self._status = self._device.status()
self._volume = self._device.volume()
self._config = config |
@property
def config(self):
'Return specific soundtouch configuration.'
return self._config | -1,869,705,880,426,975,700 | Return specific soundtouch configuration. | homeassistant/components/media_player/soundtouch.py | config | Anthonymcqueen21/home-assistant | python | @property
def config(self):
return self._config |
@property
def device(self):
'Return Soundtouch device.'
return self._device | -9,087,950,631,513,094,000 | Return Soundtouch device. | homeassistant/components/media_player/soundtouch.py | device | Anthonymcqueen21/home-assistant | python | @property
def device(self):
return self._device |
def update(self):
'Retrieve the latest data.'
self._status = self._device.status()
self._volume = self._device.volume() | -899,547,538,052,837,100 | Retrieve the latest data. | homeassistant/components/media_player/soundtouch.py | update | Anthonymcqueen21/home-assistant | python | def update(self):
self._status = self._device.status()
self._volume = self._device.volume() |
@property
def volume_level(self):
'Volume level of the media player (0..1).'
return (self._volume.actual / 100) | 8,093,680,628,403,417,000 | Volume level of the media player (0..1). | homeassistant/components/media_player/soundtouch.py | volume_level | Anthonymcqueen21/home-assistant | python | @property
def volume_level(self):
return (self._volume.actual / 100) |
@property
def name(self):
'Return the name of the device.'
return self._name | -4,231,536,673,663,769,600 | Return the name of the device. | homeassistant/components/media_player/soundtouch.py | name | Anthonymcqueen21/home-assistant | python | @property
def name(self):
return self._name |
@property
def state(self):
'Return the state of the device.'
if (self._status.source == 'STANDBY'):
return STATE_OFF
return MAP_STATUS.get(self._status.play_status, STATE_UNAVAILABLE) | -8,527,234,900,413,694,000 | Return the state of the device. | homeassistant/components/media_player/soundtouch.py | state | Anthonymcqueen21/home-assistant | python | @property
def state(self):
if (self._status.source == 'STANDBY'):
return STATE_OFF
return MAP_STATUS.get(self._status.play_status, STATE_UNAVAILABLE) |
@property
def is_volume_muted(self):
'Boolean if volume is currently muted.'
return self._volume.muted | 482,485,116,750,703,500 | Boolean if volume is currently muted. | homeassistant/components/media_player/soundtouch.py | is_volume_muted | Anthonymcqueen21/home-assistant | python | @property
def is_volume_muted(self):
return self._volume.muted |
@property
def supported_features(self):
'Flag media player features that are supported.'
return SUPPORT_SOUNDTOUCH | -2,755,989,904,597,544,400 | Flag media player features that are supported. | homeassistant/components/media_player/soundtouch.py | supported_features | Anthonymcqueen21/home-assistant | python | @property
def supported_features(self):
return SUPPORT_SOUNDTOUCH |
def turn_off(self):
'Turn off media player.'
self._device.power_off()
self._status = self._device.status() | 1,831,770,455,033,448,400 | Turn off media player. | homeassistant/components/media_player/soundtouch.py | turn_off | Anthonymcqueen21/home-assistant | python | def turn_off(self):
self._device.power_off()
self._status = self._device.status() |
def turn_on(self):
'Turn on media player.'
self._device.power_on()
self._status = self._device.status() | 7,525,695,993,868,218,000 | Turn on media player. | homeassistant/components/media_player/soundtouch.py | turn_on | Anthonymcqueen21/home-assistant | python | def turn_on(self):
self._device.power_on()
self._status = self._device.status() |
def volume_up(self):
'Volume up the media player.'
self._device.volume_up()
self._volume = self._device.volume() | 8,201,553,596,477,160,000 | Volume up the media player. | homeassistant/components/media_player/soundtouch.py | volume_up | Anthonymcqueen21/home-assistant | python | def volume_up(self):
self._device.volume_up()
self._volume = self._device.volume() |
def volume_down(self):
'Volume down media player.'
self._device.volume_down()
self._volume = self._device.volume() | -3,187,921,067,396,677,600 | Volume down media player. | homeassistant/components/media_player/soundtouch.py | volume_down | Anthonymcqueen21/home-assistant | python | def volume_down(self):
self._device.volume_down()
self._volume = self._device.volume() |
def set_volume_level(self, volume):
'Set volume level, range 0..1.'
self._device.set_volume(int((volume * 100)))
self._volume = self._device.volume() | 6,129,408,550,772,726,000 | Set volume level, range 0..1. | homeassistant/components/media_player/soundtouch.py | set_volume_level | Anthonymcqueen21/home-assistant | python | def set_volume_level(self, volume):
self._device.set_volume(int((volume * 100)))
self._volume = self._device.volume() |
def mute_volume(self, mute):
'Send mute command.'
self._device.mute()
self._volume = self._device.volume() | 1,238,587,044,756,478,700 | Send mute command. | homeassistant/components/media_player/soundtouch.py | mute_volume | Anthonymcqueen21/home-assistant | python | def mute_volume(self, mute):
self._device.mute()
self._volume = self._device.volume() |
def media_play_pause(self):
'Simulate play pause media player.'
self._device.play_pause()
self._status = self._device.status() | 5,345,033,407,334,020,000 | Simulate play pause media player. | homeassistant/components/media_player/soundtouch.py | media_play_pause | Anthonymcqueen21/home-assistant | python | def media_play_pause(self):
self._device.play_pause()
self._status = self._device.status() |
def media_play(self):
'Send play command.'
self._device.play()
self._status = self._device.status() | -5,335,708,738,905,495,000 | Send play command. | homeassistant/components/media_player/soundtouch.py | media_play | Anthonymcqueen21/home-assistant | python | def media_play(self):
self._device.play()
self._status = self._device.status() |
def media_pause(self):
'Send media pause command to media player.'
self._device.pause()
self._status = self._device.status() | 8,402,392,948,099,351,000 | Send media pause command to media player. | homeassistant/components/media_player/soundtouch.py | media_pause | Anthonymcqueen21/home-assistant | python | def media_pause(self):
self._device.pause()
self._status = self._device.status() |
def media_next_track(self):
'Send next track command.'
self._device.next_track()
self._status = self._device.status() | 4,202,091,396,942,798,000 | Send next track command. | homeassistant/components/media_player/soundtouch.py | media_next_track | Anthonymcqueen21/home-assistant | python | def media_next_track(self):
self._device.next_track()
self._status = self._device.status() |
def media_previous_track(self):
'Send the previous track command.'
self._device.previous_track()
self._status = self._device.status() | 4,618,594,907,444,566,000 | Send the previous track command. | homeassistant/components/media_player/soundtouch.py | media_previous_track | Anthonymcqueen21/home-assistant | python | def media_previous_track(self):
self._device.previous_track()
self._status = self._device.status() |
@property
def media_image_url(self):
'Image url of current playing media.'
return self._status.image | -1,630,303,899,873,581,800 | Image url of current playing media. | homeassistant/components/media_player/soundtouch.py | media_image_url | Anthonymcqueen21/home-assistant | python | @property
def media_image_url(self):
return self._status.image |
@property
def media_title(self):
'Title of current playing media.'
if (self._status.station_name is not None):
return self._status.station_name
elif (self._status.artist is not None):
return ((self._status.artist + ' - ') + self._status.track)
return None | -2,944,051,756,661,191,000 | Title of current playing media. | homeassistant/components/media_player/soundtouch.py | media_title | Anthonymcqueen21/home-assistant | python | @property
def media_title(self):
if (self._status.station_name is not None):
return self._status.station_name
elif (self._status.artist is not None):
return ((self._status.artist + ' - ') + self._status.track)
return None |
@property
def media_duration(self):
'Duration of current playing media in seconds.'
return self._status.duration | 6,310,110,882,926,136,000 | Duration of current playing media in seconds. | homeassistant/components/media_player/soundtouch.py | media_duration | Anthonymcqueen21/home-assistant | python | @property
def media_duration(self):
return self._status.duration |
@property
def media_artist(self):
'Artist of current playing media.'
return self._status.artist | -7,388,192,820,323,627,000 | Artist of current playing media. | homeassistant/components/media_player/soundtouch.py | media_artist | Anthonymcqueen21/home-assistant | python | @property
def media_artist(self):
return self._status.artist |
@property
def media_track(self):
'Track of current playing media.'
return self._status.track | -7,662,771,650,217,329,000 | Track of current playing media. | homeassistant/components/media_player/soundtouch.py | media_track | Anthonymcqueen21/home-assistant | python | @property
def media_track(self):
return self._status.track |
@property
def media_album_name(self):
'Album name of current playing media.'
return self._status.album | 5,624,437,247,729,313,000 | Album name of current playing media. | homeassistant/components/media_player/soundtouch.py | media_album_name | Anthonymcqueen21/home-assistant | python | @property
def media_album_name(self):
return self._status.album |
def play_media(self, media_type, media_id, **kwargs):
'Play a piece of media.'
_LOGGER.debug(('Starting media with media_id: ' + str(media_id)))
if re.match('http://', str(media_id)):
_LOGGER.debug('Playing URL %s', str(media_id))
self._device.play_url(str(media_id))
else:
presets = self._device.presets()
preset = next((preset for preset in presets if (preset.preset_id == str(media_id))), None)
if (preset is not None):
_LOGGER.debug(('Playing preset: ' + preset.name))
self._device.select_preset(preset)
else:
_LOGGER.warning(('Unable to find preset with id ' + str(media_id))) | -742,935,728,977,238,100 | Play a piece of media. | homeassistant/components/media_player/soundtouch.py | play_media | Anthonymcqueen21/home-assistant | python | def play_media(self, media_type, media_id, **kwargs):
_LOGGER.debug(('Starting media with media_id: ' + str(media_id)))
if re.match('http://', str(media_id)):
_LOGGER.debug('Playing URL %s', str(media_id))
self._device.play_url(str(media_id))
else:
presets = self._device.presets()
preset = next((preset for preset in presets if (preset.preset_id == str(media_id))), None)
if (preset is not None):
_LOGGER.debug(('Playing preset: ' + preset.name))
self._device.select_preset(preset)
else:
_LOGGER.warning(('Unable to find preset with id ' + str(media_id))) |
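
`play_media` branches on whether `media_id` looks like a URL; note the regex matches only `http://`, so an `https://` id falls through to the preset lookup. A hedged usage sketch (the device instance and URL are hypothetical):

```python
# Select preset slot 2 (matched against preset.preset_id).
device.play_media('music', '2')
# Stream a URL directly; only 'http://' ids take this branch.
device.play_media('music', 'http://icecast.example.com/stream.mp3')
```
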
def create_zone(self, slaves):
'\n Create a zone (multi-room) and play on selected devices.\n\n :param slaves: slaves on which to play\n\n '
if (not slaves):
_LOGGER.warning('Unable to create zone without slaves')
else:
_LOGGER.info(('Creating zone with master ' + str(self.device.config.name)))
self.device.create_zone([slave.device for slave in slaves]) | -6,557,522,368,038,594,000 | Create a zone (multi-room) and play on selected devices.
:param slaves: slaves on which to play | homeassistant/components/media_player/soundtouch.py | create_zone | Anthonymcqueen21/home-assistant | python | def create_zone(self, slaves):
'\n Create a zone (multi-room) and play on selected devices.\n\n :param slaves: slaves on which to play\n\n '
if (not slaves):
_LOGGER.warning('Unable to create zone without slaves')
else:
_LOGGER.info(('Creating zone with master ' + str(self.device.config.name)))
self.device.create_zone([slave.device for slave in slaves]) |
def remove_zone_slave(self, slaves):
"\n Remove slave(s) from and existing zone (multi-room).\n\n Zone must already exist and slaves array can not be empty.\n Note: If removing last slave, the zone will be deleted and you'll have\n to create a new one. You will not be able to add a new slave anymore\n\n :param slaves: slaves to remove from the zone\n\n "
if (not slaves):
_LOGGER.warning('Unable to find slaves to remove')
else:
_LOGGER.info(('Removing slaves from zone with master ' + str(self.device.config.name)))
self.device.remove_zone_slave([slave.device for slave in slaves]) | -5,533,741,328,883,818,000 | Remove slave(s) from an existing zone (multi-room).
Zone must already exist and slaves array cannot be empty.
Note: If removing the last slave, the zone will be deleted and you'll have
to create a new one. You will not be able to add a new slave anymore
:param slaves: slaves to remove from the zone | homeassistant/components/media_player/soundtouch.py | remove_zone_slave | Anthonymcqueen21/home-assistant | python | def remove_zone_slave(self, slaves):
"\n Remove slave(s) from and existing zone (multi-room).\n\n Zone must already exist and slaves array can not be empty.\n Note: If removing last slave, the zone will be deleted and you'll have\n to create a new one. You will not be able to add a new slave anymore\n\n :param slaves: slaves to remove from the zone\n\n "
if (not slaves):
_LOGGER.warning('Unable to find slaves to remove')
else:
_LOGGER.info(('Removing slaves from zone with master ' + str(self.device.config.name)))
self.device.remove_zone_slave([slave.device for slave in slaves]) |
def add_zone_slave(self, slaves):
'\n Add slave(s) to an existing zone (multi-room).\n\n Zone must already exist and slaves array cannot be empty.\n\n :param slaves: slaves to add\n\n '
if (not slaves):
_LOGGER.warning('Unable to find slaves to add')
else:
_LOGGER.info(('Adding slaves to zone with master ' + str(self.device.config.name)))
self.device.add_zone_slave([slave.device for slave in slaves]) | 7,416,214,219,522,609,000 | Add slave(s) to an existing zone (multi-room).
Zone must already exist and slaves array cannot be empty.
:param slaves: slaves to add | homeassistant/components/media_player/soundtouch.py | add_zone_slave | Anthonymcqueen21/home-assistant | python | def add_zone_slave(self, slaves):
'\n Add slave(s) to an existing zone (multi-room).\n\n Zone must already exist and slaves array cannot be empty.\n\n :param slaves: slaves to add\n\n '
if (not slaves):
_LOGGER.warning('Unable to find slaves to add')
else:
_LOGGER.info(('Adding slaves to zone with master ' + str(self.device.config.name)))
self.device.add_zone_slave([slave.device for slave in slaves]) |
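Taken together, create_zone, add_zone_slave and remove_zone_slave cover the whole multi-room lifecycle. A hypothetical sketch (not part of this component) of chaining them; master, slaves and extra stand in for SoundTouchDevice entities:
def resize_zone(master, slaves, extra):
    # create a zone with the master plus the initial slaves
    master.create_zone(slaves)
    # grow the zone by one more device
    master.add_zone_slave([extra])
    # shrink it again; removing the last slave deletes the zone
    master.remove_zone_slave(slaves)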
@cached_property
def openapi_types():
'\n This must be a method because a model may have properties that are\n of type self; this must run after the class is loaded\n\n Returns\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n '
return {'name': (str,), 'property_id': (str,), 'transaction_type': (str,), 'created_by_transaction_id': (str,), 'amount': (str,)} | 475,727,224,795,010,050 | This must be a method because a model may have properties that are
of type self; this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type. | cryptoapis/model/address_tokens_transaction_unconfirmed_omnilayertoken.py | openapi_types | dkremer-ledger/Crypto_APIs_2.0_SDK_Python | python | @cached_property
def openapi_types():
'\n This must be a method because a model may have properties that are\n of type self; this must run after the class is loaded\n\n Returns\n openapi_types (dict): The key is attribute name\n and the value is attribute type.\n '
return {'name': (str,), 'property_id': (str,), 'transaction_type': (str,), 'created_by_transaction_id': (str,), 'amount': (str,)} |
@convert_js_args_to_python_args
def __init__(self, name, property_id, transaction_type, created_by_transaction_id, amount, *args, **kwargs):
'AddressTokensTransactionUnconfirmedOmnilayertoken - a model defined in OpenAPI\n\n Args:\n name (str): Specifies the name of the token.\n property_id (str): Defines the ID of the property for Omni Layer.\n transaction_type (str): Defines the type of the transaction made.\n created_by_transaction_id (str): The transaction ID used to create the token.\n amount (str): Defines the amount of tokens sent with the transaction that is pending confirmation.\n\n Keyword Args:\n _check_type (bool): if True, values for parameters in openapi_types\n will be type checked and a TypeError will be\n raised if the wrong type is input.\n Defaults to True\n _path_to_item (tuple/list): This is a list of keys or values to\n drill down to the model in received_data\n when deserializing a response\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _configuration (Configuration): the instance to use when\n deserializing a file_type parameter.\n If passed, type conversion is attempted\n If omitted no type conversion is done.\n _visited_composed_classes (tuple): This stores a tuple of\n classes that we have traveled through so that\n if we see that class again we will not use its\n discriminator again.\n When traveling through a discriminator, the\n composed schema that is\n is traveled through is added to this set.\n For example if Animal has a discriminator\n petType and we pass in "Dog", and the class Dog\n allOf includes Animal, we move through Animal\n once using the discriminator, and pick Dog.\n Then in Dog, we will make an instance of the\n Animal class but this time we won\'t travel\n through its discriminator because we passed in\n _visited_composed_classes = (Animal,)\n '
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
self.name = name
self.property_id = property_id
self.transaction_type = transaction_type
self.created_by_transaction_id = created_by_transaction_id
self.amount = amount
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value) | -1,529,628,932,198,324,500 | AddressTokensTransactionUnconfirmedOmnilayertoken - a model defined in OpenAPI
Args:
name (str): Specifies the name of the token.
property_id (str): Defines the ID of the property for Omni Layer.
transaction_type (str): Defines the type of the transaction made.
created_by_transaction_id (str): The transaction ID used to create the token.
amount (str): Defines the amount of tokens sent with the transaction that is pending confirmation.
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,) | cryptoapis/model/address_tokens_transaction_unconfirmed_omnilayertoken.py | __init__ | dkremer-ledger/Crypto_APIs_2.0_SDK_Python | python | @convert_js_args_to_python_args
def __init__(self, name, property_id, transaction_type, created_by_transaction_id, amount, *args, **kwargs):
'AddressTokensTransactionUnconfirmedOmnilayertoken - a model defined in OpenAPI\n\n Args:\n name (str): Specifies the name of the token.\n property_id (str): Defines the ID of the property for Omni Layer.\n transaction_type (str): Defines the type of the transaction made.\n created_by_transaction_id (str): The transaction ID used to create the token.\n amount (str): Defines the amount of tokens sent with the transaction that is pending confirmation.\n\n Keyword Args:\n _check_type (bool): if True, values for parameters in openapi_types\n will be type checked and a TypeError will be\n raised if the wrong type is input.\n Defaults to True\n _path_to_item (tuple/list): This is a list of keys or values to\n drill down to the model in received_data\n when deserializing a response\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _configuration (Configuration): the instance to use when\n deserializing a file_type parameter.\n If passed, type conversion is attempted\n If omitted no type conversion is done.\n _visited_composed_classes (tuple): This stores a tuple of\n classes that we have traveled through so that\n if we see that class again we will not use its\n discriminator again.\n When traveling through a discriminator, the\n composed schema that is\n is traveled through is added to this set.\n For example if Animal has a discriminator\n petType and we pass in "Dog", and the class Dog\n allOf includes Animal, we move through Animal\n once using the discriminator, and pick Dog.\n Then in Dog, we will make an instance of the\n Animal class but this time we won\'t travel\n through its discriminator because we passed in\n _visited_composed_classes = (Animal,)\n '
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
self.name = name
self.property_id = property_id
self.transaction_type = transaction_type
self.created_by_transaction_id = created_by_transaction_id
self.amount = amount
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value) |
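A hypothetical instantiation of the model defined above; every field value below is made up for illustration:
token = AddressTokensTransactionUnconfirmedOmnilayertoken(
    name='ExampleToken',
    property_id='31',
    transaction_type='simple send',
    created_by_transaction_id='abc123',
    amount='7.5')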
@staticmethod
def get_schema(max_nesting_depth: Optional[int]=6, nesting_depth: int=0, nesting_list: List[str]=[], max_recursion_limit: Optional[int]=2, include_extension: Optional[bool]=False, extension_fields: Optional[List[str]]=['valueBoolean', 'valueCode', 'valueDate', 'valueDateTime', 'valueDecimal', 'valueId', 'valueInteger', 'valuePositiveInt', 'valueString', 'valueTime', 'valueUnsignedInt', 'valueUri', 'valueQuantity'], extension_depth: int=0, max_extension_depth: Optional[int]=2) -> Union[(StructType, DataType)]:
'\n A summary of information based on the results of executing a TestScript.\n\n\n id: unique id for the element within a resource (for internal references). This\n may be any string value that does not contain spaces.\n\n extension: May be used to represent additional information that is not part of the basic\n definition of the element. In order to make the use of extensions safe and\n manageable, there is a strict set of governance applied to the definition and\n use of extensions. Though any implementer is allowed to define an extension,\n there is a set of requirements that SHALL be met as part of the definition of\n the extension.\n\n action: The teardown action will only contain an operation.\n\n '
from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema
from spark_fhir_schemas.stu3.complex_types.testreport_action2 import TestReport_Action2Schema
if ((max_recursion_limit and (nesting_list.count('TestReport_Teardown') >= max_recursion_limit)) or (max_nesting_depth and (nesting_depth >= max_nesting_depth))):
return StructType([StructField('id', StringType(), True)])
my_nesting_list: List[str] = (nesting_list + ['TestReport_Teardown'])
schema = StructType([StructField('id', StringType(), True), StructField('extension', ArrayType(ExtensionSchema.get_schema(max_nesting_depth=max_nesting_depth, nesting_depth=(nesting_depth + 1), nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth)), True), StructField('action', ArrayType(TestReport_Action2Schema.get_schema(max_nesting_depth=max_nesting_depth, nesting_depth=(nesting_depth + 1), nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth)), True)])
if (not include_extension):
schema.fields = [(c if (c.name != 'extension') else StructField('extension', StringType(), True)) for c in schema.fields]
return schema | 2,466,369,165,751,653,000 | A summary of information based on the results of executing a TestScript.
id: unique id for the element within a resource (for internal references). This
may be any string value that does not contain spaces.
extension: May be used to represent additional information that is not part of the basic
definition of the element. In order to make the use of extensions safe and
manageable, there is a strict set of governance applied to the definition and
use of extensions. Though any implementer is allowed to define an extension,
there is a set of requirements that SHALL be met as part of the definition of
the extension.
action: The teardown action will only contain an operation. | spark_fhir_schemas/stu3/complex_types/testreport_teardown.py | get_schema | icanbwell/SparkFhirSchemas | python | @staticmethod
def get_schema(max_nesting_depth: Optional[int]=6, nesting_depth: int=0, nesting_list: List[str]=[], max_recursion_limit: Optional[int]=2, include_extension: Optional[bool]=False, extension_fields: Optional[List[str]]=['valueBoolean', 'valueCode', 'valueDate', 'valueDateTime', 'valueDecimal', 'valueId', 'valueInteger', 'valuePositiveInt', 'valueString', 'valueTime', 'valueUnsignedInt', 'valueUri', 'valueQuantity'], extension_depth: int=0, max_extension_depth: Optional[int]=2) -> Union[(StructType, DataType)]:
'\n A summary of information based on the results of executing a TestScript.\n\n\n id: unique id for the element within a resource (for internal references). This\n may be any string value that does not contain spaces.\n\n extension: May be used to represent additional information that is not part of the basic\n definition of the element. In order to make the use of extensions safe and\n manageable, there is a strict set of governance applied to the definition and\n use of extensions. Though any implementer is allowed to define an extension,\n there is a set of requirements that SHALL be met as part of the definition of\n the extension.\n\n action: The teardown action will only contain an operation.\n\n '
from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema
from spark_fhir_schemas.stu3.complex_types.testreport_action2 import TestReport_Action2Schema
if ((max_recursion_limit and (nesting_list.count('TestReport_Teardown') >= max_recursion_limit)) or (max_nesting_depth and (nesting_depth >= max_nesting_depth))):
return StructType([StructField('id', StringType(), True)])
my_nesting_list: List[str] = (nesting_list + ['TestReport_Teardown'])
schema = StructType([StructField('id', StringType(), True), StructField('extension', ArrayType(ExtensionSchema.get_schema(max_nesting_depth=max_nesting_depth, nesting_depth=(nesting_depth + 1), nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth)), True), StructField('action', ArrayType(TestReport_Action2Schema.get_schema(max_nesting_depth=max_nesting_depth, nesting_depth=(nesting_depth + 1), nesting_list=my_nesting_list, max_recursion_limit=max_recursion_limit, include_extension=include_extension, extension_fields=extension_fields, extension_depth=extension_depth, max_extension_depth=max_extension_depth)), True)])
if (not include_extension):
schema.fields = [(c if (c.name != 'extension') else StructField('extension', StringType(), True)) for c in schema.fields]
return schema |
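A hedged usage sketch for the schema builder above: the wrapping class name TestReport_TeardownSchema is inferred from the import naming pattern shown in the body (not confirmed by this record), and spark is assumed to be an active SparkSession:
schema = TestReport_TeardownSchema.get_schema(max_nesting_depth=3, include_extension=False)
df = spark.read.schema(schema).json('teardowns.json')  # path is a placeholder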
def convert_to_amp(model: nn.Module, optimizer: Optimizer, criterion: _Loss, mode: AMP_TYPE, amp_config: Config=None):
'A helper function to wrap training components with Torch AMP modules.\n\n Args:\n model (:class:`torch.nn.Module`): your model object.\n optimizer (:class:`torch.optim.Optimizer`): your optimizer object.\n criterion (:class:`torch.nn.modules.loss._Loss`): your loss function object.\n mode (:class:`colossalai.amp.AMP_TYPE`): amp mode.\n amp_config (Union[:class:`colossalai.context.Config`, dict]): configuration for different amp modes.\n\n Returns:\n A tuple (model, optimizer, criterion).\n\n Note:\n ``amp_config`` may vary depending on the mode you choose. You should check the corresponding amp mode\n for more details about ``amp_config``.\n For ``apex_amp``, please check\n `apex_amp config <https://nvidia.github.io/apex/amp.html?highlight=apex%20amp>`_.\n For ``naive_amp``, please check\n `naive_amp config <https://github.com/hpcaitech/ColossalAI/blob/main/colossalai/amp/naive_amp/_fp16_optimizer.py#L42>`_.\n For ``torch_amp``, please check\n `torch_amp config <https://github.com/pytorch/pytorch/blob/master/torch/cuda/amp/grad_scaler.py#L97>`_.\n '
assert isinstance(mode, AMP_TYPE), f'expected the argument mode be AMP_TYPE, but got {type(mode)}'
if (amp_config is None):
amp_config = Config()
if (mode == AMP_TYPE.TORCH):
(model, optimizer, criterion) = convert_to_torch_amp(model, optimizer, criterion, amp_config)
elif (mode == AMP_TYPE.APEX):
(model, optimizer) = convert_to_apex_amp(model, optimizer, amp_config)
elif (mode == AMP_TYPE.NAIVE):
(model, optimizer) = convert_to_naive_amp(model, optimizer, amp_config)
return (model, optimizer, criterion) | 7,606,609,365,008,932,000 | A helper function to wrap training components with Torch AMP modules.
Args:
model (:class:`torch.nn.Module`): your model object.
optimizer (:class:`torch.optim.Optimizer`): your optimizer object.
criterion (:class:`torch.nn.modules.loss._Loss`): your loss function object.
mode (:class:`colossalai.amp.AMP_TYPE`): amp mode.
amp_config (Union[:class:`colossalai.context.Config`, dict]): configuration for different amp modes.
Returns:
A tuple (model, optimizer, criterion).
Note:
``amp_config`` may vary depending on the mode you choose. You should check the corresponding amp mode
for more details about ``amp_config``.
For ``apex_amp``, please check
`apex_amp config <https://nvidia.github.io/apex/amp.html?highlight=apex%20amp>`_.
For ``naive_amp``, please check
`naive_amp config <https://github.com/hpcaitech/ColossalAI/blob/main/colossalai/amp/naive_amp/_fp16_optimizer.py#L42>`_.
For ``torch_amp``, please check
`torch_amp config <https://github.com/pytorch/pytorch/blob/master/torch/cuda/amp/grad_scaler.py#L97>`_. | colossalai/amp/__init__.py | convert_to_amp | Cautiousss/ColossalAI | python | def convert_to_amp(model: nn.Module, optimizer: Optimizer, criterion: _Loss, mode: AMP_TYPE, amp_config: Config=None):
'A helper function to wrap training components with Torch AMP modules.\n\n Args:\n model (:class:`torch.nn.Module`): your model object.\n optimizer (:class:`torch.optim.Optimizer`): your optimizer object.\n criterion (:class:`torch.nn.modules.loss._Loss`): your loss function object.\n mode (:class:`colossalai.amp.AMP_TYPE`): amp mode.\n amp_config (Union[:class:`colossalai.context.Config`, dict]): configuration for different amp modes.\n\n Returns:\n A tuple (model, optimizer, criterion).\n\n Note:\n ``amp_config`` may vary depending on the mode you choose. You should check the corresponding amp mode\n for more details about ``amp_config``.\n For ``apex_amp``, please check\n `apex_amp config <https://nvidia.github.io/apex/amp.html?highlight=apex%20amp>`_.\n For ``naive_amp``, please check\n `naive_amp config <https://github.com/hpcaitech/ColossalAI/blob/main/colossalai/amp/naive_amp/_fp16_optimizer.py#L42>`_.\n For ``torch_amp``, please check\n `torch_amp config <https://github.com/pytorch/pytorch/blob/master/torch/cuda/amp/grad_scaler.py#L97>`_.\n '
assert isinstance(mode, AMP_TYPE), f'expected the argument mode be AMP_TYPE, but got {type(mode)}'
if (amp_config is None):
amp_config = Config()
if (mode == AMP_TYPE.TORCH):
(model, optimizer, criterion) = convert_to_torch_amp(model, optimizer, criterion, amp_config)
elif (mode == AMP_TYPE.APEX):
(model, optimizer) = convert_to_apex_amp(model, optimizer, amp_config)
elif (mode == AMP_TYPE.NAIVE):
(model, optimizer) = convert_to_naive_amp(model, optimizer, amp_config)
return (model, optimizer, criterion) |
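A minimal usage sketch for convert_to_amp under the AMP_TYPE.TORCH mode described in the docstring; the toy model, optimizer and criterion are placeholders:
import torch
import torch.nn as nn

model = nn.Linear(16, 4)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
criterion = nn.CrossEntropyLoss()
# wraps all three with torch AMP machinery and returns them
model, optimizer, criterion = convert_to_amp(model, optimizer, criterion, mode=AMP_TYPE.TORCH)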
def get_df_with_query(self, query):
" WARNING :: Will crash if too large. If so, you should just create the df file\n first via create_df_file(query=).\n\n load example:\n with open(input, 'rb') as infile:\n objs = []\n while True:\n try:\n obj = pickle.load(infile)\n except EOFError:\n break\n ...\n "
return pd.read_sql(query, self.engine) | -8,687,322,850,089,922,000 | WARNING :: Will crash if too large. If so, you should just create the df file
first via create_df_file_with_query(query=).
load example:
with open(input, 'rb') as infile:
objs = []
while True:
try:
obj = pickle.load(infile)
except EOFError:
break
... | ilxutils/ilxutils/database_client.py | get_df_with_query | MCSZ/pyontutils | python | def get_df_with_query(self, query):
" WARNING :: Will crash if too large. If so, you should just create the df file\n first via create_df_file(query=).\n\n load example:\n with open(input, 'rb') as infile:\n objs = []\n while True:\n try:\n obj = pickle.load(infile)\n except EOFError:\n break\n ...\n "
return pd.read_sql(query, self.engine) |
def create_df_file_with_query(self, query, output):
' Dumps in df in chunks to avoid crashes.\n '
chunk_size = 100000
offset = 0
data = defaultdict((lambda : defaultdict(list)))
with open(output, 'wb') as outfile:
query = query.replace(';', '')
query += ' LIMIT {chunk_size} OFFSET {offset};'
while True:
print(offset)
# format into a fresh variable so the template keeps its placeholders for the next chunk
chunk_query = query.format(chunk_size=chunk_size, offset=offset)
df = pd.read_sql(chunk_query, self.engine)
pickle.dump(df, outfile)
offset += chunk_size
if (len(df) < chunk_size):
break
outfile.close() | -3,148,335,562,757,280,300 | Dumps in df in chunks to avoid crashes. | ilxutils/ilxutils/database_client.py | create_df_file_with_query | MCSZ/pyontutils | python | def create_df_file_with_query(self, query, output):
' \n '
chunk_size = 100000
offset = 0
data = defaultdict((lambda : defaultdict(list)))
with open(output, 'wb') as outfile:
query = query.replace(';', '')
query += ' LIMIT {chunk_size} OFFSET {offset};'
while True:
print(offset)
# format into a fresh variable so the template keeps its placeholders for the next chunk
chunk_query = query.format(chunk_size=chunk_size, offset=offset)
df = pd.read_sql(chunk_query, self.engine)
pickle.dump(df, outfile)
offset += chunk_size
if (len(df) < chunk_size):
break
outfile.close() |
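The get_df_with_query docstring above sketches how to read such a file back but leaves the loop unfinished; a completed loader, assuming the file was written by create_df_file_with_query:
import pickle
import pandas as pd

def load_chunked_df(path):
    chunks = []
    with open(path, 'rb') as infile:
        while True:
            try:
                chunks.append(pickle.load(infile))  # one DataFrame per dumped chunk
            except EOFError:
                break
    return pd.concat(chunks, ignore_index=True)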
async def begin_download(self, resource_group_name: str, virtual_wan_name: str, request: '_models.GetVpnSitesConfigurationRequest', **kwargs) -> AsyncLROPoller[None]:
"Gives the sas-url to download the configurations for vpn-sites in a resource group.\n\n :param resource_group_name: The resource group name.\n :type resource_group_name: str\n :param virtual_wan_name: The name of the VirtualWAN for which configuration of all vpn-sites is\n needed.\n :type virtual_wan_name: str\n :param request: Parameters supplied to download vpn-sites configuration.\n :type request: ~azure.mgmt.network.v2020_06_01.models.GetVpnSitesConfigurationRequest\n :keyword callable cls: A custom type or function that will be passed the direct response\n :keyword str continuation_token: A continuation token to restart a poller from a saved state.\n :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,\n False for no polling, or your own initialized polling object for a personal polling strategy.\n :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod\n :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.\n :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)\n :rtype: ~azure.core.polling.AsyncLROPoller[None]\n :raises ~azure.core.exceptions.HttpResponseError:\n "
polling = kwargs.pop('polling', True)
cls = kwargs.pop('cls', None)
lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
cont_token = kwargs.pop('continuation_token', None)
if (cont_token is None):
raw_result = (await self._download_initial(resource_group_name=resource_group_name, virtual_wan_name=virtual_wan_name, request=request, cls=(lambda x, y, z: x), **kwargs))
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url('resource_group_name', resource_group_name, 'str'), 'virtualWANName': self._serialize.url('virtual_wan_name', virtual_wan_name, 'str')}
if (polling is True):
polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif (polling is False):
polling_method = AsyncNoPolling()
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) | 3,853,412,763,086,890,000 | Gives the sas-url to download the configurations for vpn-sites in a resource group.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param virtual_wan_name: The name of the VirtualWAN for which configuration of all vpn-sites is
needed.
:type virtual_wan_name: str
:param request: Parameters supplied to download vpn-sites configuration.
:type request: ~azure.mgmt.network.v2020_06_01.models.GetVpnSitesConfigurationRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError: | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_vpn_sites_configuration_operations.py | begin_download | AriZavala2/azure-sdk-for-python | python | async def begin_download(self, resource_group_name: str, virtual_wan_name: str, request: '_models.GetVpnSitesConfigurationRequest', **kwargs) -> AsyncLROPoller[None]:
"Gives the sas-url to download the configurations for vpn-sites in a resource group.\n\n :param resource_group_name: The resource group name.\n :type resource_group_name: str\n :param virtual_wan_name: The name of the VirtualWAN for which configuration of all vpn-sites is\n needed.\n :type virtual_wan_name: str\n :param request: Parameters supplied to download vpn-sites configuration.\n :type request: ~azure.mgmt.network.v2020_06_01.models.GetVpnSitesConfigurationRequest\n :keyword callable cls: A custom type or function that will be passed the direct response\n :keyword str continuation_token: A continuation token to restart a poller from a saved state.\n :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,\n False for no polling, or your own initialized polling object for a personal polling strategy.\n :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod\n :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.\n :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)\n :rtype: ~azure.core.polling.AsyncLROPoller[None]\n :raises ~azure.core.exceptions.HttpResponseError:\n "
polling = kwargs.pop('polling', True)
cls = kwargs.pop('cls', None)
lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
cont_token = kwargs.pop('continuation_token', None)
if (cont_token is None):
raw_result = (await self._download_initial(resource_group_name=resource_group_name, virtual_wan_name=virtual_wan_name, request=request, cls=(lambda x, y, z: x), **kwargs))
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {'subscriptionId': self._serialize.url('self._config.subscription_id', self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url('resource_group_name', resource_group_name, 'str'), 'virtualWANName': self._serialize.url('virtual_wan_name', virtual_wan_name, 'str')}
if (polling is True):
polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif (polling is False):
polling_method = AsyncNoPolling()
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) |
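A hedged usage sketch for the long-running operation above; the operations-group attribute name and all argument values are assumptions rather than facts from this record:
async def download_vpn_config(client, request):
    poller = await client.vpn_sites_configuration.begin_download(
        resource_group_name='my-rg',   # placeholder
        virtual_wan_name='my-wan',     # placeholder
        request=request)
    await poller.result()  # resolves to None on success, per the docstring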
def testaArq(arq):
'\n -> Checks whether the file arq exists\n :arq: Name of the file to be tested.\n :return: returns True if the file is found,\n otherwise False\n '
try:
a = open(arq)
except FileNotFoundError:
print('File not found!')
return False
else:
return True | -6,764,541,411,200,980,000 | -> Checks whether the file arq exists
:arq: Name of the file to be tested.
:return: returns True if the file is found,
otherwise False | bibli/arquivo/__init__.py | testaArq | EduardoPessanha/Python | python | def testaArq(arq):
'\n -> Checks whether the file arq exists\n :arq: Name of the file to be tested.\n :return: returns True if the file is found,\n otherwise False\n '
try:
a = open(arq)
except FileNotFoundError:
print('File not found!')
return False
else:
return True |
def criaArq(arq=''):
'\n -> Creates a text file if it does not exist.\n :param arq: Name of the file.\n :return:\n '
try:
a = open(arq, 'xt')
except FileExistsError:
print(f'ERROR: the file "{arq}" already exists!')
else:
print(f'The file "{arq}" was created successfully!')
a.close()
return | -2,465,597,370,708,236,000 | -> Creates a text file if it does not exist.
:param arq: Name of the file.
:return: | bibli/arquivo/__init__.py | criaArq | EduardoPessanha/Python | python | def criaArq(arq=''):
'\n -> Creates a text file if it does not exist.\n :param arq: Name of the file.\n :return:\n '
try:
a = open(arq, 'xt')
except FileExistsError:
print(f'ERROR: the file "{arq}" already exists!')
else:
print(f'The file "{arq}" was created successfully!')
a.close()
return |
def leArq(arq=''):
'\n -> Opens and shows the items of a text file.\n :param arq: Name of the file.\n :return:\n '
return | -2,733,973,352,055,925,000 | -> Opens and shows the items of a text file.
:param arq: Name of the file.
:return: | bibli/arquivo/__init__.py | leArq | EduardoPessanha/Python | python | def leArq(arq=''):
'\n -> Opens and shows the items of a text file.\n :param arq: Name of the file.\n :return:\n '
return |
def editaArq(arq):
'\n -> Opens a text file and appends a new item at the \n end of the file.\n :param arq: Name of the file.\n :return:\n '
return | -5,919,721,813,091,252,000 | -> Opens a text file and appends a new item at the
end of the file.
:param arq: Name of the file.
:return: | bibli/arquivo/__init__.py | editaArq | EduardoPessanha/Python | python | def editaArq(arq):
'\n -> Opens a text file and appends a new item at the \n end of the file.\n :param arq: Name of the file.\n :return:\n '
return |
def _video_embedding(checkpoint_path: str):
'Load the video embedding for the BraVe model to evaluate.'
checkpoint = np.load(checkpoint_path, allow_pickle=True).item()
params = checkpoint['params']
state = checkpoint['state']
brave_config_dct = checkpoint['config']
brave_config = brave.BraveConfig(**brave_config_dct)
model = brave.get_model(brave_config)
@jax.jit
def embedding_fn(view: datasets.View) -> chex.Array:
narrow_forward_fn = model.forward_fns['narrow_video']
(embedding, _) = narrow_forward_fn(params, state, None, view, False)
return embedding
def synchronous_embedding_fn(view: datasets.View) -> chex.Array:
return jax.device_get(embedding_fn(view))
return synchronous_embedding_fn | -4,326,589,488,126,686,700 | Load the video embedding for the BraVe model to evaluate. | brave/evaluate_video_embeddings.py | _video_embedding | deepmind/brave | python | def _video_embedding(checkpoint_path: str):
checkpoint = np.load(checkpoint_path, allow_pickle=True).item()
params = checkpoint['params']
state = checkpoint['state']
brave_config_dct = checkpoint['config']
brave_config = brave.BraveConfig(**brave_config_dct)
model = brave.get_model(brave_config)
@jax.jit
def embedding_fn(view: datasets.View) -> chex.Array:
narrow_forward_fn = model.forward_fns['narrow_video']
(embedding, _) = narrow_forward_fn(params, state, None, view, False)
return embedding
def synchronous_embedding_fn(view: datasets.View) -> chex.Array:
return jax.device_get(embedding_fn(view))
return synchronous_embedding_fn |
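The closure pattern used above, shown in isolation as a minimal sketch: jit-compile a pure function, then wrap it with jax.device_get so callers receive host-side NumPy arrays synchronously. The toy function stands in for the narrow video network:
import jax
import jax.numpy as jnp

@jax.jit
def embed(x):
    return x * 2.0  # stand-in computation

def embed_sync(x):
    return jax.device_get(embed(x))  # block and copy the result back to host

out = embed_sync(jnp.ones((3,)))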
def Instance(self):
' This function has been arbitrarily put into the stubs'
return DataGridViewAutoSizeColumnMode() | -469,363,876,814,707,100 | This function has been arbitrarily put into the stubs | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewAutoSizeColumnMode.py | Instance | tranconbv/ironpython-stubs | python | def Instance(self):
' '
return DataGridViewAutoSizeColumnMode() |
def __eq__(self, *args):
' x.__eq__(y) <==> x==y '
pass | 2,144,965,521,805,394,200 | x.__eq__(y) <==> x==y | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewAutoSizeColumnMode.py | __eq__ | tranconbv/ironpython-stubs | python | def __eq__(self, *args):
' '
pass |
def __format__(self, *args):
' __format__(formattable: IFormattable,format: str) -> str '
pass | -4,894,195,495,142,889,000 | __format__(formattable: IFormattable,format: str) -> str | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewAutoSizeColumnMode.py | __format__ | tranconbv/ironpython-stubs | python | def __format__(self, *args):
' '
pass |
def __init__(self, *args):
' x.__init__(...) initializes x; see x.__class__.__doc__ for signature '
pass | -90,002,593,062,007,400 | x.__init__(...) initializes x; see x.__class__.__doc__ for signature | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewAutoSizeColumnMode.py | __init__ | tranconbv/ironpython-stubs | python | def __init__(self, *args):
' '
pass |
@unittest.skipIf(interpreter_requires_environment(), 'Cannot run -E tests when PYTHON env vars are required.')
def test_env_var_ignored_with_E(self):
'PYTHON* environment variables must be ignored when -E is present.'
code = 'import tracemalloc; print(tracemalloc.is_tracing())'
(ok, stdout, stderr) = assert_python_ok('-E', '-c', code, PYTHONTRACEMALLOC='1')
stdout = stdout.rstrip()
self.assertEqual(stdout, b'False') | 370,077,749,898,084,600 | PYTHON* environment variables must be ignored when -E is present. | python3/Python-3.6.1/Lib/test/test_tracemalloc.py | test_env_var_ignored_with_E | BrainSpawnInfosphere/raspbian_pkgs | python | @unittest.skipIf(interpreter_requires_environment(), 'Cannot run -E tests when PYTHON env vars are required.')
def test_env_var_ignored_with_E(self):
code = 'import tracemalloc; print(tracemalloc.is_tracing())'
(ok, stdout, stderr) = assert_python_ok('-E', '-c', code, PYTHONTRACEMALLOC='1')
stdout = stdout.rstrip()
self.assertEqual(stdout, b'False') |
def setup_DA_params(self):
'\n net_num_pool_op_kernel_sizes is different in resunet\n '
super().setup_DA_params()
self.deep_supervision_scales = ([[1, 1, 1]] + list((list(i) for i in (1 / np.cumprod(np.vstack(self.net_num_pool_op_kernel_sizes[1:]), axis=0))))[:(- 1)]) | 91,199,006,713,276,660 | net_num_pool_op_kernel_sizes is different in resunet | nnunet/training/network_training/nnUNet_variants/architectural_variants/nnUNetTrainerV2_ResencUNet.py | setup_DA_params | ADVasculatureProject/nnUNet | python | def setup_DA_params(self):
'\n \n '
super().setup_DA_params()
self.deep_supervision_scales = ([[1, 1, 1]] + list((list(i) for i in (1 / np.cumprod(np.vstack(self.net_num_pool_op_kernel_sizes[1:]), axis=0))))[:(- 1)]) |
def __enter__(self) -> None:
'\n Runs the event loop for application startup.\n '
self.loop.create_task(self.run())
self.loop.run_until_complete(self.startup()) | -5,961,435,231,847,981,000 | Runs the event loop for application startup. | mangum/protocols/lifespan.py | __enter__ | IlyaSukhanov/mangum | python | def __enter__(self) -> None:
'\n \n '
self.loop.create_task(self.run())
self.loop.run_until_complete(self.startup()) |
def __exit__(self, exc_type: typing.Optional[typing.Type[BaseException]], exc_value: typing.Optional[BaseException], traceback: typing.Optional[types.TracebackType]) -> None:
'\n Runs the event loop for application shutdown.\n '
self.loop.run_until_complete(self.shutdown()) | 1,610,925,060,029,687,000 | Runs the event loop for application shutdown. | mangum/protocols/lifespan.py | __exit__ | IlyaSukhanov/mangum | python | def __exit__(self, exc_type: typing.Optional[typing.Type[BaseException]], exc_value: typing.Optional[BaseException], traceback: typing.Optional[types.TracebackType]) -> None:
'\n \n '
self.loop.run_until_complete(self.shutdown()) |
async def run(self) -> None:
'\n Calls the application with the `lifespan` connection scope.\n '
try:
(await self.app({'type': 'lifespan'}, self.receive, self.send))
except LifespanUnsupported:
self.logger.info("ASGI 'lifespan' protocol appears unsupported.")
except (LifespanFailure, UnexpectedMessage) as exc:
self.exception = exc
except BaseException as exc:
self.logger.error("Exception in 'lifespan' protocol.", exc_info=exc)
finally:
self.startup_event.set()
self.shutdown_event.set() | 7,224,950,021,407,085,000 | Calls the application with the `lifespan` connection scope. | mangum/protocols/lifespan.py | run | IlyaSukhanov/mangum | python | async def run(self) -> None:
'\n \n '
try:
(await self.app({'type': 'lifespan'}, self.receive, self.send))
except LifespanUnsupported:
self.logger.info("ASGI 'lifespan' protocol appears unsupported.")
except (LifespanFailure, UnexpectedMessage) as exc:
self.exception = exc
except BaseException as exc:
self.logger.error("Exception in 'lifespan' protocol.", exc_info=exc)
finally:
self.startup_event.set()
self.shutdown_event.set() |
async def receive(self) -> Message:
'\n Awaited by the application to receive ASGI `lifespan` events.\n '
if (self.state is LifespanCycleState.CONNECTING):
self.state = LifespanCycleState.STARTUP
elif (self.state is LifespanCycleState.STARTUP):
self.state = LifespanCycleState.SHUTDOWN
return (await self.app_queue.get()) | 9,037,355,104,793,460,000 | Awaited by the application to receive ASGI `lifespan` events. | mangum/protocols/lifespan.py | receive | IlyaSukhanov/mangum | python | async def receive(self) -> Message:
'\n \n '
if (self.state is LifespanCycleState.CONNECTING):
self.state = LifespanCycleState.STARTUP
elif (self.state is LifespanCycleState.STARTUP):
self.state = LifespanCycleState.SHUTDOWN
return (await self.app_queue.get()) |
async def send(self, message: Message) -> None:
'\n Awaited by the application to send ASGI `lifespan` events.\n '
message_type = message['type']
self.logger.info("%s: '%s' event received from application.", self.state, message_type)
if (self.state is LifespanCycleState.CONNECTING):
if (self.lifespan == 'on'):
raise LifespanFailure("Lifespan connection failed during startup and lifespan is 'on'.")
self.state = LifespanCycleState.UNSUPPORTED
raise LifespanUnsupported('Lifespan protocol appears unsupported.')
if (message_type not in ('lifespan.startup.complete', 'lifespan.shutdown.complete', 'lifespan.startup.failed', 'lifespan.shutdown.failed')):
self.state = LifespanCycleState.FAILED
raise UnexpectedMessage(f"Unexpected '{message_type}' event received.")
if (self.state is LifespanCycleState.STARTUP):
if (message_type == 'lifespan.startup.complete'):
self.startup_event.set()
elif (message_type == 'lifespan.startup.failed'):
self.state = LifespanCycleState.FAILED
self.startup_event.set()
message = message.get('message', '')
raise LifespanFailure(f'Lifespan startup failure. {message}')
elif (self.state is LifespanCycleState.SHUTDOWN):
if (message_type == 'lifespan.shutdown.complete'):
self.shutdown_event.set()
elif (message_type == 'lifespan.shutdown.failed'):
self.state = LifespanCycleState.FAILED
self.shutdown_event.set()
message = message.get('message', '')
raise LifespanFailure(f'Lifespan shutdown failure. {message}') | 555,154,299,804,281,660 | Awaited by the application to send ASGI `lifespan` events. | mangum/protocols/lifespan.py | send | IlyaSukhanov/mangum | python | async def send(self, message: Message) -> None:
'\n \n '
message_type = message['type']
self.logger.info("%s: '%s' event received from application.", self.state, message_type)
if (self.state is LifespanCycleState.CONNECTING):
if (self.lifespan == 'on'):
raise LifespanFailure("Lifespan connection failed during startup and lifespan is 'on'.")
self.state = LifespanCycleState.UNSUPPORTED
raise LifespanUnsupported('Lifespan protocol appears unsupported.')
if (message_type not in ('lifespan.startup.complete', 'lifespan.shutdown.complete', 'lifespan.startup.failed', 'lifespan.shutdown.failed')):
self.state = LifespanCycleState.FAILED
raise UnexpectedMessage(f"Unexpected '{message_type}' event received.")
if (self.state is LifespanCycleState.STARTUP):
if (message_type == 'lifespan.startup.complete'):
self.startup_event.set()
elif (message_type == 'lifespan.startup.failed'):
self.state = LifespanCycleState.FAILED
self.startup_event.set()
message = message.get('message', '')
raise LifespanFailure(f'Lifespan startup failure. {message}')
elif (self.state is LifespanCycleState.SHUTDOWN):
if (message_type == 'lifespan.shutdown.complete'):
self.shutdown_event.set()
elif (message_type == 'lifespan.shutdown.failed'):
self.state = LifespanCycleState.FAILED
self.shutdown_event.set()
message = message.get('message', '')
raise LifespanFailure(f'Lifespan shutdown failure. {message}') |
async def startup(self) -> None:
'\n Pushes the `lifespan` startup event to application queue and handles errors.\n '
self.logger.info('Waiting for application startup.')
(await self.app_queue.put({'type': 'lifespan.startup'}))
(await self.startup_event.wait())
if (self.state is LifespanCycleState.FAILED):
raise LifespanFailure(self.exception)
if (not self.exception):
self.logger.info('Application startup complete.')
else:
self.logger.info('Application startup failed.') | -6,644,141,905,951,629,000 | Pushes the `lifespan` startup event to application queue and handles errors. | mangum/protocols/lifespan.py | startup | IlyaSukhanov/mangum | python | async def startup(self) -> None:
'\n \n '
self.logger.info('Waiting for application startup.')
(await self.app_queue.put({'type': 'lifespan.startup'}))
(await self.startup_event.wait())
if (self.state is LifespanCycleState.FAILED):
raise LifespanFailure(self.exception)
if (not self.exception):
self.logger.info('Application startup complete.')
else:
self.logger.info('Application startup failed.') |
async def shutdown(self) -> None:
'\n Pushes the `lifespan` shutdown event to application queue and handles errors.\n '
self.logger.info('Waiting for application shutdown.')
(await self.app_queue.put({'type': 'lifespan.shutdown'}))
(await self.shutdown_event.wait())
if (self.state is LifespanCycleState.FAILED):
raise LifespanFailure(self.exception) | 8,278,047,386,884,461,000 | Pushes the `lifespan` shutdown event to application queue and handles errors. | mangum/protocols/lifespan.py | shutdown | IlyaSukhanov/mangum | python | async def shutdown(self) -> None:
'\n \n '
self.logger.info('Waiting for application shutdown.')
(await self.app_queue.put({'type': 'lifespan.shutdown'}))
(await self.shutdown_event.wait())
if (self.state is LifespanCycleState.FAILED):
raise LifespanFailure(self.exception) |
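Putting the methods above together, the cycle is driven as a synchronous context manager; a sketch assuming the constructor takes the ASGI app and a lifespan setting, with app and handle() as placeholders:
with LifespanCycle(app, lifespan='auto'):
    handle()  # event handling runs between application startup and shutdown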
@property
def vectorized(self):
'Vectorized or not.'
return True | 8,761,219,097,070,709,000 | Vectorized or not. | src/metarl/tf/policies/categorical_mlp_policy.py | vectorized | icml2020submission6857/metarl | python | @property
def vectorized(self):
return True |
def dist_info_sym(self, obs_var, state_info_vars=None, name=None):
'Symbolic graph of the distribution.'
with tf.compat.v1.variable_scope(self._variable_scope):
prob = self.model.build(obs_var, name=name)
return dict(prob=prob) | -3,295,666,032,268,184,600 | Symbolic graph of the distribution. | src/metarl/tf/policies/categorical_mlp_policy.py | dist_info_sym | icml2020submission6857/metarl | python | def dist_info_sym(self, obs_var, state_info_vars=None, name=None):
with tf.compat.v1.variable_scope(self._variable_scope):
prob = self.model.build(obs_var, name=name)
return dict(prob=prob) |
def dist_info(self, obs, state_infos=None):
'Distribution info.'
prob = self._f_prob(obs)
return dict(prob=prob) | -5,666,997,249,009,512,000 | Distribution info. | src/metarl/tf/policies/categorical_mlp_policy.py | dist_info | icml2020submission6857/metarl | python | def dist_info(self, obs, state_infos=None):
prob = self._f_prob(obs)
return dict(prob=prob) |
def get_action(self, observation):
'Return a single action.'
flat_obs = self.observation_space.flatten(observation)
prob = self._f_prob([flat_obs])[0]
action = self.action_space.weighted_sample(prob)
return (action, dict(prob=prob)) | 8,386,072,811,127,983,000 | Return a single action. | src/metarl/tf/policies/categorical_mlp_policy.py | get_action | icml2020submission6857/metarl | python | def get_action(self, observation):
flat_obs = self.observation_space.flatten(observation)
prob = self._f_prob([flat_obs])[0]
action = self.action_space.weighted_sample(prob)
return (action, dict(prob=prob)) |
def get_actions(self, observations):
'Return multiple actions.'
flat_obs = self.observation_space.flatten_n(observations)
probs = self._f_prob(flat_obs)
actions = list(map(self.action_space.weighted_sample, probs))
return (actions, dict(prob=probs)) | 8,948,528,764,209,231,000 | Return multiple actions. | src/metarl/tf/policies/categorical_mlp_policy.py | get_actions | icml2020submission6857/metarl | python | def get_actions(self, observations):
flat_obs = self.observation_space.flatten_n(observations)
probs = self._f_prob(flat_obs)
actions = list(map(self.action_space.weighted_sample, probs))
return (actions, dict(prob=probs)) |
def get_regularizable_vars(self):
'Get regularizable weight variables under the Policy scope.'
trainable = self.get_trainable_vars()
return [var for var in trainable if (('hidden' in var.name) and ('kernel' in var.name))] | 2,844,257,466,489,497,000 | Get regularizable weight variables under the Policy scope. | src/metarl/tf/policies/categorical_mlp_policy.py | get_regularizable_vars | icml2020submission6857/metarl | python | def get_regularizable_vars(self):
trainable = self.get_trainable_vars()
return [var for var in trainable if (('hidden' in var.name) and ('kernel' in var.name))] |
@property
def distribution(self):
'Policy distribution.'
return Categorical(self.action_dim) | 2,591,793,809,372,804,600 | Policy distribution. | src/metarl/tf/policies/categorical_mlp_policy.py | distribution | icml2020submission6857/metarl | python | @property
def distribution(self):
return Categorical(self.action_dim) |
def __getstate__(self):
'Object.__getstate__.'
new_dict = super().__getstate__()
del new_dict['_f_prob']
return new_dict | 4,697,696,970,018,978,000 | Object.__getstate__. | src/metarl/tf/policies/categorical_mlp_policy.py | __getstate__ | icml2020submission6857/metarl | python | def __getstate__(self):
new_dict = super().__getstate__()
del new_dict['_f_prob']
return new_dict |
def __setstate__(self, state):
'Object.__setstate__.'
super().__setstate__(state)
self._initialize() | 860,091,733,351,240,400 | Object.__setstate__. | src/metarl/tf/policies/categorical_mlp_policy.py | __setstate__ | icml2020submission6857/metarl | python | def __setstate__(self, state):
super().__setstate__(state)
self._initialize() |
def getConstituents(jet, node_id, outers_list):
'\n Recursive function to get a list of the tree leaves\n '
if (jet['tree'][(node_id, 0)] == (- 1)):
outers_list.append(jet['content'][node_id])
else:
getConstituents(jet, jet['tree'][(node_id, 0)], outers_list)
getConstituents(jet, jet['tree'][(node_id, 1)], outers_list)
return outers_list | -6,086,853,602,560,528,000 | Recursive function to get a list of the tree leaves | src/ClusterTrellis/utils.py | getConstituents | SebastianMacaluso/ClusterTrellis | python | def getConstituents(jet, node_id, outers_list):
'\n \n '
if (jet['tree'][(node_id, 0)] == (- 1)):
outers_list.append(jet['content'][node_id])
else:
getConstituents(jet, jet['tree'][(node_id, 0)], outers_list)
getConstituents(jet, jet['tree'][(node_id, 1)], outers_list)
return outers_list |
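An illustrative call, assuming the jet layout the function implies: jet['tree'][i] holds the (left, right) child ids with -1 marking a leaf, and jet['content'][i] holds that node's momentum. The numbers are invented:
import numpy as np

jet = {
    'tree': np.array([[1, 2], [-1, -1], [-1, -1]]),
    'content': np.array([[2.0, 0.0], [1.0, 0.5], [1.0, -0.5]]),
}
leaves = getConstituents(jet, node_id=0, outers_list=[])
# leaves == [content[1], content[2]], the two leaves of the tree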
def clone(self) -> Any:
'Clone a definition expression node.'
return deepcopy(self) | -6,875,111,423,679,963,000 | Clone a definition expression node. | sphinx/util/cfamily.py | clone | OliverSieweke/sphinx | python | def clone(self) -> Any:
return deepcopy(self) |
def genseq2(wtseq, mutations, keepdupes=False):
' generate a sequence library based on wtseq\n @param: list of tuples, [ (resid, library), (resid, library), ...]\n\n @returns: list of sequences\n '
def estimator(mutations):
est = 1
for mut in mutations:
lib = mut[1]
est *= (len(lib) + 1)
return est
logger.info('will mutate wtseq %s and create about %s mutations', wtseq, estimator(mutations))
seqo = list(wtseq)
sequences = [seqo]
while (len(mutations) > 0):
newseqs = sequences[:]
(res, lib) = mutations.pop()
for seqo in sequences:
res = int(res)
if (res < 1):
raise ValueError('Impossible: resid < 1!', res)
pos = (res - 1)
for aa in lib:
if (len(aa) != 1):
raise ValueError('Impossible 1-letter aminoacid', aa, 'in lib', lib)
seqn = seqo[:]
seqn[pos] = aa
if (keepdupes or (seqn not in newseqs)):
newseqs.append(seqn)
sequences = newseqs
return sequences | 5,363,564,138,921,414,000 | generate a sequence library based on wtseq
@param: list of tuples, [ (resid, library), (resid, library), ...]
@returns: list of sequences | cadee/prep/genseqs.py | genseq2 | kamerlinlab/cadee | python | def genseq2(wtseq, mutations, keepdupes=False):
' generate a sequence library based on wtseq\n @param: list of tuples, [ (resid, library), (resid, library), ...]\n\n @returns: list of sequences\n '
def estimator(mutations):
est = 1
for mut in mutations:
lib = mut[1]
est *= (len(lib) + 1)
return est
logger.info('will mutate wtseq %s and create about %s mutations', wtseq, estimator(mutations))
seqo = list(wtseq)
sequences = [seqo]
while (len(mutations) > 0):
newseqs = sequences[:]
(res, lib) = mutations.pop()
for seqo in sequences:
res = int(res)
if (res < 1):
raise ValueError('Impossible: resid < 1!', res)
pos = (res - 1)
for aa in lib:
if (len(aa) != 1):
raise ValueError('Impossible 1-letter aminoacid', aa, 'in lib', lib)
seqn = seqo[:]
seqn[pos] = aa
if (keepdupes or (seqn not in newseqs)):
newseqs.append(seqn)
sequences = newseqs
return sequences |
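A small worked call, assuming one-letter amino-acid strings as libraries (residue numbering is 1-based, as the resid check requires):
wt = 'acd'
muts = [(1, 'mv'), (3, 'k')]  # residue 1 -> m or v, residue 3 -> k
seqs = genseq2(wt, muts)
# seqs holds the wild type plus every single and combined substitution,
# i.e. acd, ack, mcd, vcd, mck, vck (as lists of characters)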
def combine(lib, pos):
'generate combinations of up to 7.\n @param lib: library\n @param pos: positions to mutate\n # TODO: implement in readable (recursively)\n '
numseqs = 1
for each in lib:
numseqs *= len(each)
logger.info('Generating %s %s', numseqs, 'sequeces. Please wait.')
seqlib = []
logger.info('Library %s, Positions %s', lib, pos)
for every in lib[0]:
if (len(pos) > 1):
for (every2,) in lib[1]:
if (len(pos) > 2):
for (every3,) in lib[2]:
if (len(pos) > 3):
for (every4,) in lib[3]:
if (len(pos) > 4):
for (every5,) in lib[4]:
if (len(pos) > 5):
for (every6,) in lib[5]:
if (len(pos) > 6):
for every7 in lib[6]:
seqlib.append([every, every2, every3, every4, every5, every6, every7])
else:
seqlib.append([every, every2, every3, every4, every5, every6])
else:
seqlib.append([every, every2, every3, every4, every5])
else:
seqlib.append([every, every2, every3, every4])
else:
seqlib.append([every, every2, every3])
else:
seqlib.append([every, every2])
else:
seqlib.append([every])
return seqlib | -2,113,763,849,492,463,000 | generate combinations of up to 7.
@param lib: library
@param pos: positions to mutate
# TODO: implement in readable (recursively) | cadee/prep/genseqs.py | combine | kamerlinlab/cadee | python | def combine(lib, pos):
'generate combinations of up to 7.\n @param lib: library\n @param pos: positions to mutate\n # TODO: implement in readable (recursively)\n '
numseqs = 1
for each in lib:
numseqs *= len(each)
logger.info('Generating %s %s', numseqs, 'sequeces. Please wait.')
seqlib = []
logger.info('Library %s, Positions %s', lib, pos)
for every in lib[0]:
if (len(pos) > 1):
            for every2 in lib[1]:
if (len(pos) > 2):
                    for every3 in lib[2]:
if (len(pos) > 3):
                            for every4 in lib[3]:
if (len(pos) > 4):
                                    for every5 in lib[4]:
if (len(pos) > 5):
                                            for every6 in lib[5]:
if (len(pos) > 6):
for every7 in lib[6]:
seqlib.append([every, every2, every3, every4, every5, every6, every7])
else:
seqlib.append([every, every2, every3, every4, every5, every6])
else:
seqlib.append([every, every2, every3, every4, every5])
else:
                                    seqlib.append([every, every2, every3, every4])
else:
seqlib.append([every, every2, every3])
else:
seqlib.append([every, every2])
else:
seqlib.append([every])
return seqlib |
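
The TODO in the docstring above asks for a readable replacement of the nested loops; a sketch using itertools.product is equivalent for the supported cases and removes the seven-position limit. This is an assumed rewrite for illustration, not code from the cadee repository:

import itertools

def combine_product(lib, pos):
    # Cartesian product of one residue library per mutated position;
    # lib[i] is the library applied at pos[i], mirroring combine(lib, pos).
    return [list(combo) for combo in itertools.product(*lib[:len(pos)])]
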
def gen_seqlib(sequence, pos, lib):
    '\n Generates sequences, mutating at pos[x] to all residues in lib[x].\n If len(lib)==1, the same lib will be used for all positions.\n Return sequences\n '
if isinstance(lib, str):
lib = [lib]
if (len(lib) == 1):
        for _ in range(1, len(pos)):
lib.append(lib[0])
if (len(pos) != len(lib)):
msg = 'Bad Input: Dimensions of pos and lib must be equal: '
msg += 'found: #pos: {0}, #lib {1}'.format(len(pos), len(lib))
        raise Exception(msg)
seqlib = combine(lib, pos)
sequences_1d = {}
for i in range(0, len(seqlib)):
nfa = list(sequence)
        for (j, posj) in enumerate(pos):
if (nfa[posj].upper() != seqlib[i][j].upper()):
nfa[posj] = seqlib[i][j]
modseq = ''.join(nfa)
sequences_1d[modseq] = 1
    return sequences_1d | -6,607,761,989,149,985,000 | Generates sequences, mutating at pos[x] to all residues in lib[x].
If len(lib)==1, the same lib will be used for all positions.
Return sequences | cadee/prep/genseqs.py | gen_seqlib | kamerlinlab/cadee | python | def gen_seqlib(sequence, pos, lib):
    '\n Generates sequences, mutating at pos[x] to all residues in lib[x].\n If len(lib)==1, the same lib will be used for all positions.\n Return sequences\n '
if isinstance(lib, str):
lib = [lib]
if (len(lib) == 1):
        for _ in range(1, len(pos)):
lib.append(lib[0])
if (len(pos) != len(lib)):
msg = 'Bad Input: Dimensions of pos and lib must be equal: '
msg += 'found: #pos: {0}, #lib {1}'.format(len(pos), len(lib))
        raise Exception(msg)
seqlib = combine(lib, pos)
sequences_1d = {}
for i in range(0, len(seqlib)):
nfa = list(sequence)
        for (j, posj) in enumerate(pos):
if (nfa[posj].upper() != seqlib[i][j].upper()):
nfa[posj] = seqlib[i][j]
        modseq = ''.join(nfa)
sequences_1d[modseq] = 1
return sequences_1d |
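
For illustration, a hypothetical call to gen_seqlib; pos holds 0-based indices because the function indexes the sequence directly, and a single library entry is broadcast to every position:

variants = gen_seqlib('mktayiakqr', [2, 5], ['ag'])
for seq in variants:   # gen_seqlib returns a dict keyed by sequence
    print(seq)
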
def get_fasta(wtpdb):
'Return fasta code of wtpdb'
from pyscwrl import babel_pdb_for_scwrl
babel_pdb_for_scwrl(wtpdb)
fasta = ''
for line in open('proper.fasta'):
line = line[:(- 1)]
if (line[0] == '>'):
continue
for char in line:
fasta += char.lower()
return fasta | 7,253,762,444,206,976,000 | Return fasta code of wtpdb | cadee/prep/genseqs.py | get_fasta | kamerlinlab/cadee | python | def get_fasta(wtpdb):
from pyscwrl import babel_pdb_for_scwrl
babel_pdb_for_scwrl(wtpdb)
    fasta = ''
for line in open('proper.fasta'):
line = line[:(- 1)]
if (line[0] == '>'):
continue
for char in line:
fasta += char.lower()
return fasta |
def get_sequences(wtpdb, resids, library):
'Return list of sequences for resids, created with library'
print(wtpdb, resids)
fasta = get_fasta(wtpdb)
posids = []
for resid in resids:
posids.append((int(resid) - 1))
sequences = gen_seqlib(fasta, posids, [library])
return sequences | 8,925,430,724,545,916,000 | Return list of sequences for resids, created with library | cadee/prep/genseqs.py | get_sequences | kamerlinlab/cadee | python | def get_sequences(wtpdb, resids, library):
print(wtpdb, resids)
fasta = get_fasta(wtpdb)
posids = []
for resid in resids:
posids.append((int(resid) - 1))
sequences = gen_seqlib(fasta, posids, [library])
return sequences |
def usage():
'Print Usage and exit'
print('')
print('Usage:')
    print(((' ' + sys.argv[0]) + ' qprep-wt.pdb res1 [ res2 ... ]'))
print('')
sys.exit(ERR_USAGE) | -8,156,399,249,877,998,000 | Print Usage and exit | cadee/prep/genseqs.py | usage | kamerlinlab/cadee | python | def usage():
print()
print('Usage:')
    print(((' ' + sys.argv[0]) + ' qprep-wt.pdb res1 [ res2 ... ]'))
print()
sys.exit(ERR_USAGE) |
def get_resnumbers(args):
'Return residue-numbers as list-of-integers'
resids = []
for resid in args:
try:
resids.append(int(resid))
except ValueError:
print('ValueError with ', resid, ' expected: Integer')
usage()
if (len(resids) > 7):
print('FATAL:')
print('You ask me to mutate more than 7 residues at one time.')
        print('This is NOT IMPLEMENTED... ...probably a BAD IDEA:')
print('This is a bad idea, because we grow with LIBRARY^{#RES}!')
        print('In your case ', len(LIB), '^', len(resids), '=', (len(LIB) ** len(resids)), '!')
usage()
return resids | 1,992,831,559,295,503,000 | Return residue-numbers as list-of-integers | cadee/prep/genseqs.py | get_resnumbers | kamerlinlab/cadee | python | def get_resnumbers(args):
resids = []
for resid in args:
try:
resids.append(int(resid))
except ValueError:
print('ValueError with ', resid, ' expected: Integer')
usage()
if (len(resids) > 7):
print('FATAL:')
print('You ask me to mutate more than 7 residues at one time.')
        print('This is NOT IMPLEMENTED... ...probably a BAD IDEA:')
print('This is a bad idea, because we grow with LIBRARY^{#RES}!')
        print('In your case ', len(LIB), '^', len(resids), '=', (len(LIB) ** len(resids)), '!')
usage()
return resids |
def delete_widgets_bundle_using_delete(self, widgets_bundle_id, **kwargs):
"Delete widgets bundle (deleteWidgetsBundle) # noqa: E501\n\n Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_widgets_bundle_using_delete(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, **kwargs)
else:
data = self.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, **kwargs)
return data | 4,747,062,505,124,701,000 | Delete widgets bundle (deleteWidgetsBundle) # noqa: E501
Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_widgets_bundle_using_delete(widgets_bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | delete_widgets_bundle_using_delete | D34DPlayer/thingsboard-python-rest-client | python | def delete_widgets_bundle_using_delete(self, widgets_bundle_id, **kwargs):
"Delete widgets bundle (deleteWidgetsBundle) # noqa: E501\n\n Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_widgets_bundle_using_delete(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, **kwargs)
else:
data = self.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, **kwargs)
return data |
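
A sketch of calling the controller above, showing the synchronous and async_req variants that the docstring describes. The Configuration/ApiClient wiring assumes the usual swagger-codegen package layout, and the host and bundle id are placeholders:

from tb_rest_client.api_client import ApiClient
from tb_rest_client.configuration import Configuration
from tb_rest_client.api.api_pe.widgets_bundle_controller_api import WidgetsBundleControllerApi

configuration = Configuration()
configuration.host = 'https://thingsboard.example.com'      # placeholder host
api = WidgetsBundleControllerApi(ApiClient(configuration))  # auth header setup omitted

bundle_id = '784f394c-42b6-435a-983c-b7beff2784f9'          # placeholder id
api.delete_widgets_bundle_using_delete(bundle_id)           # synchronous (default)
thread = api.delete_widgets_bundle_using_delete(bundle_id, async_req=True)
thread.get()                                                # block until the async call finishes
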
def delete_widgets_bundle_using_delete_with_http_info(self, widgets_bundle_id, **kwargs):
"Delete widgets bundle (deleteWidgetsBundle) # noqa: E501\n\n Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['widgets_bundle_id']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method delete_widgets_bundle_using_delete" % key))
params[key] = val
del params['kwargs']
if (('widgets_bundle_id' not in params) or (params['widgets_bundle_id'] is None)):
raise ValueError('Missing the required parameter `widgets_bundle_id` when calling `delete_widgets_bundle_using_delete`')
collection_formats = {}
path_params = {}
if ('widgets_bundle_id' in params):
path_params['widgetsBundleId'] = params['widgets_bundle_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundle/{widgetsBundleId}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | 6,203,597,513,249,683,000 | Delete widgets bundle (deleteWidgetsBundle) # noqa: E501
Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | delete_widgets_bundle_using_delete_with_http_info | D34DPlayer/thingsboard-python-rest-client | python | def delete_widgets_bundle_using_delete_with_http_info(self, widgets_bundle_id, **kwargs):
"Delete widgets bundle (deleteWidgetsBundle) # noqa: E501\n\n Deletes the widget bundle. Referencing non-existing Widget Bundle Id will cause an error. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_widgets_bundle_using_delete_with_http_info(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['widgets_bundle_id']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method delete_widgets_bundle_using_delete" % key))
params[key] = val
del params['kwargs']
if (('widgets_bundle_id' not in params) or (params['widgets_bundle_id'] is None)):
raise ValueError('Missing the required parameter `widgets_bundle_id` when calling `delete_widgets_bundle_using_delete`')
collection_formats = {}
path_params = {}
if ('widgets_bundle_id' in params):
path_params['widgetsBundleId'] = params['widgets_bundle_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundle/{widgetsBundleId}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type=None, auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) |
def get_widgets_bundle_by_id_using_get(self, widgets_bundle_id, **kwargs):
"Get Widget Bundle (getWidgetsBundleById) # noqa: E501\n\n Get the Widget Bundle based on the provided Widget Bundle Id. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundle_by_id_using_get(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, **kwargs)
else:
data = self.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, **kwargs)
return data | 3,337,590,703,368,964,600 | Get Widget Bundle (getWidgetsBundleById) # noqa: E501
Get the Widget Bundle based on the provided Widget Bundle Id. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widgets_bundle_by_id_using_get(widgets_bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: WidgetsBundle
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | get_widgets_bundle_by_id_using_get | D34DPlayer/thingsboard-python-rest-client | python | def get_widgets_bundle_by_id_using_get(self, widgets_bundle_id, **kwargs):
"Get Widget Bundle (getWidgetsBundleById) # noqa: E501\n\n Get the Widget Bundle based on the provided Widget Bundle Id. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundle_by_id_using_get(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, **kwargs)
else:
data = self.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, **kwargs)
return data |
def get_widgets_bundle_by_id_using_get_with_http_info(self, widgets_bundle_id, **kwargs):
"Get Widget Bundle (getWidgetsBundleById) # noqa: E501\n\n Get the Widget Bundle based on the provided Widget Bundle Id. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['widgets_bundle_id']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_widgets_bundle_by_id_using_get" % key))
params[key] = val
del params['kwargs']
if (('widgets_bundle_id' not in params) or (params['widgets_bundle_id'] is None)):
raise ValueError('Missing the required parameter `widgets_bundle_id` when calling `get_widgets_bundle_by_id_using_get`')
collection_formats = {}
path_params = {}
if ('widgets_bundle_id' in params):
path_params['widgetsBundleId'] = params['widgets_bundle_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundle/{widgetsBundleId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='WidgetsBundle', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | -5,290,086,580,090,830,000 | Get Widget Bundle (getWidgetsBundleById) # noqa: E501
Get the Widget Bundle based on the provided Widget Bundle Id. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: WidgetsBundle
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | get_widgets_bundle_by_id_using_get_with_http_info | D34DPlayer/thingsboard-python-rest-client | python | def get_widgets_bundle_by_id_using_get_with_http_info(self, widgets_bundle_id, **kwargs):
"Get Widget Bundle (getWidgetsBundleById) # noqa: E501\n\n Get the Widget Bundle based on the provided Widget Bundle Id. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundle_by_id_using_get_with_http_info(widgets_bundle_id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str widgets_bundle_id: A string value representing the widget bundle id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['widgets_bundle_id']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_widgets_bundle_by_id_using_get" % key))
params[key] = val
del params['kwargs']
if (('widgets_bundle_id' not in params) or (params['widgets_bundle_id'] is None)):
raise ValueError('Missing the required parameter `widgets_bundle_id` when calling `get_widgets_bundle_by_id_using_get`')
collection_formats = {}
path_params = {}
if ('widgets_bundle_id' in params):
path_params['widgetsBundleId'] = params['widgets_bundle_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundle/{widgetsBundleId}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='WidgetsBundle', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) |
def get_widgets_bundles_using_get(self, **kwargs):
'Get all Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns an array of Widget Bundle objects that are available for current user.Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :return: list[WidgetsBundle]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widgets_bundles_using_get_with_http_info(**kwargs)
else:
data = self.get_widgets_bundles_using_get_with_http_info(**kwargs)
return data | -980,786,956,624,726,500 | Get all Widget Bundles (getWidgetsBundles) # noqa: E501
Returns an array of Widget Bundle objects that are available for current user.Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widgets_bundles_using_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[WidgetsBundle]
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | get_widgets_bundles_using_get | D34DPlayer/thingsboard-python-rest-client | python | def get_widgets_bundles_using_get(self, **kwargs):
'Get all Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns an array of Widget Bundle objects that are available for current user.Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :return: list[WidgetsBundle]\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widgets_bundles_using_get_with_http_info(**kwargs)
else:
data = self.get_widgets_bundles_using_get_with_http_info(**kwargs)
return data |
def get_widgets_bundles_using_get_with_http_info(self, **kwargs):
'Get all Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns an array of Widget Bundle objects that are available for current user.Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :return: list[WidgetsBundle]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = []
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_widgets_bundles_using_get" % key))
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundles', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[WidgetsBundle]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | -4,335,309,213,342,309,000 | Get all Widget Bundles (getWidgetsBundles) # noqa: E501
Returns an array of Widget Bundle objects that are available for current user.Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widgets_bundles_using_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[WidgetsBundle]
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | get_widgets_bundles_using_get_with_http_info | D34DPlayer/thingsboard-python-rest-client | python | def get_widgets_bundles_using_get_with_http_info(self, **kwargs):
'Get all Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns an array of Widget Bundle objects that are available for current user.Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :return: list[WidgetsBundle]\n If the method is called asynchronously,\n returns the request thread.\n '
all_params = []
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_widgets_bundles_using_get" % key))
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundles', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='list[WidgetsBundle]', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) |
def get_widgets_bundles_using_get1(self, page_size, page, **kwargs):
"Get Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns a page of Widget Bundle objects available for current user. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get1(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int page_size: Maximum amount of entities in a one page (required)\n :param int page: Sequence number of page starting from 0 (required)\n :param str text_search: The case insensitive 'startsWith' filter based on the widget bundle title.\n :param str sort_property: Property of entity to sort by\n :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)\n :return: PageDataWidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widgets_bundles_using_get1_with_http_info(page_size, page, **kwargs)
else:
data = self.get_widgets_bundles_using_get1_with_http_info(page_size, page, **kwargs)
return data | 3,505,187,575,195,381,000 | Get Widget Bundles (getWidgetsBundles) # noqa: E501
Returns a page of Widget Bundle objects available for current user. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for any authorized user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widgets_bundles_using_get1(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum amount of entities in one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the widget bundle title.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataWidgetsBundle
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | get_widgets_bundles_using_get1 | D34DPlayer/thingsboard-python-rest-client | python | def get_widgets_bundles_using_get1(self, page_size, page, **kwargs):
"Get Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns a page of Widget Bundle objects available for current user. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get1(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int page_size: Maximum amount of entities in a one page (required)\n :param int page: Sequence number of page starting from 0 (required)\n :param str text_search: The case insensitive 'startsWith' filter based on the widget bundle title.\n :param str sort_property: Property of entity to sort by\n :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)\n :return: PageDataWidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widgets_bundles_using_get1_with_http_info(page_size, page, **kwargs)
else:
data = self.get_widgets_bundles_using_get1_with_http_info(page_size, page, **kwargs)
return data |
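
Following the PageData contract described in the docstring, a paging loop over getWidgetsBundles could look like the sketch below; 'api' is a configured WidgetsBundleControllerApi as in the earlier sketch, and the data/has_next attribute names are assumed from the generated PageDataWidgetsBundle model:

page, page_size = 0, 50
while True:
    result = api.get_widgets_bundles_using_get1(page_size, page,
                                                sort_property='title', sort_order='ASC')
    for bundle in result.data:   # current page of WidgetsBundle objects
        print(bundle.title)
    if not result.has_next:      # stop once the server reports no further pages
        break
    page += 1
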
def get_widgets_bundles_using_get1_with_http_info(self, page_size, page, **kwargs):
"Get Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns a page of Widget Bundle objects available for current user. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get1_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int page_size: Maximum amount of entities in a one page (required)\n :param int page: Sequence number of page starting from 0 (required)\n :param str text_search: The case insensitive 'startsWith' filter based on the widget bundle title.\n :param str sort_property: Property of entity to sort by\n :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)\n :return: PageDataWidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_widgets_bundles_using_get1" % key))
params[key] = val
del params['kwargs']
if (('page_size' not in params) or (params['page_size'] is None)):
raise ValueError('Missing the required parameter `page_size` when calling `get_widgets_bundles_using_get1`')
if (('page' not in params) or (params['page'] is None)):
raise ValueError('Missing the required parameter `page` when calling `get_widgets_bundles_using_get1`')
collection_formats = {}
path_params = {}
query_params = []
if ('page_size' in params):
query_params.append(('pageSize', params['page_size']))
if ('page' in params):
query_params.append(('page', params['page']))
if ('text_search' in params):
query_params.append(('textSearch', params['text_search']))
if ('sort_property' in params):
query_params.append(('sortProperty', params['sort_property']))
if ('sort_order' in params):
query_params.append(('sortOrder', params['sort_order']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundles{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataWidgetsBundle', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | 6,871,705,365,096,214,000 | Get Widget Bundles (getWidgetsBundles) # noqa: E501
Returns a page of Widget Bundle objects available for current user. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for any authorized user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widgets_bundles_using_get1_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum amount of entities in one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the widget bundle title.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataWidgetsBundle
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | get_widgets_bundles_using_get1_with_http_info | D34DPlayer/thingsboard-python-rest-client | python | def get_widgets_bundles_using_get1_with_http_info(self, page_size, page, **kwargs):
"Get Widget Bundles (getWidgetsBundles) # noqa: E501\n\n Returns a page of Widget Bundle objects available for current user. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for any authorized user. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_widgets_bundles_using_get1_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param int page_size: Maximum amount of entities in a one page (required)\n :param int page: Sequence number of page starting from 0 (required)\n :param str text_search: The case insensitive 'startsWith' filter based on the widget bundle title.\n :param str sort_property: Property of entity to sort by\n :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)\n :return: PageDataWidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method get_widgets_bundles_using_get1" % key))
params[key] = val
del params['kwargs']
if (('page_size' not in params) or (params['page_size'] is None)):
raise ValueError('Missing the required parameter `page_size` when calling `get_widgets_bundles_using_get1`')
if (('page' not in params) or (params['page'] is None)):
raise ValueError('Missing the required parameter `page` when calling `get_widgets_bundles_using_get1`')
collection_formats = {}
path_params = {}
query_params = []
if ('page_size' in params):
query_params.append(('pageSize', params['page_size']))
if ('page' in params):
query_params.append(('page', params['page']))
if ('text_search' in params):
query_params.append(('textSearch', params['text_search']))
if ('sort_property' in params):
query_params.append(('sortProperty', params['sort_property']))
if ('sort_order' in params):
query_params.append(('sortOrder', params['sort_order']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundles{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataWidgetsBundle', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) |
def save_widgets_bundle_using_post(self, **kwargs):
"Create Or Update Widget Bundle (saveWidgetsBundle) # noqa: E501\n\n Create or update the Widget Bundle. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. When creating the bundle, platform generates Widget Bundle Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Widget Bundle Id will be present in the response. Specify existing Widget Bundle id to update the Widget Bundle. Referencing non-existing Widget Bundle Id will cause 'Not Found' error. Widget Bundle alias is unique in the scope of tenant. Special Tenant Id '13814000-1dd2-11b2-8080-808080808080' is automatically used if the create bundle request is sent by user with 'SYS_ADMIN' authority. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.save_widgets_bundle_using_post(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param WidgetsBundle body:\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.save_widgets_bundle_using_post_with_http_info(**kwargs)
else:
data = self.save_widgets_bundle_using_post_with_http_info(**kwargs)
return data | -1,972,856,413,691,748,600 | Create Or Update Widget Bundle (saveWidgetsBundle) # noqa: E501
Create or update the Widget Bundle. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. When creating the bundle, platform generates Widget Bundle Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Widget Bundle Id will be present in the response. Specify existing Widget Bundle id to update the Widget Bundle. Referencing non-existing Widget Bundle Id will cause 'Not Found' error. Widget Bundle alias is unique in the scope of tenant. Special Tenant Id '13814000-1dd2-11b2-8080-808080808080' is automatically used if the create bundle request is sent by user with 'SYS_ADMIN' authority. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_widgets_bundle_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param WidgetsBundle body:
:return: WidgetsBundle
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | save_widgets_bundle_using_post | D34DPlayer/thingsboard-python-rest-client | python | def save_widgets_bundle_using_post(self, **kwargs):
"Create Or Update Widget Bundle (saveWidgetsBundle) # noqa: E501\n\n Create or update the Widget Bundle. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. When creating the bundle, platform generates Widget Bundle Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Widget Bundle Id will be present in the response. Specify existing Widget Bundle id to update the Widget Bundle. Referencing non-existing Widget Bundle Id will cause 'Not Found' error. Widget Bundle alias is unique in the scope of tenant. Special Tenant Id '13814000-1dd2-11b2-8080-808080808080' is automatically used if the create bundle request is sent by user with 'SYS_ADMIN' authority. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.save_widgets_bundle_using_post(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param WidgetsBundle body:\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.save_widgets_bundle_using_post_with_http_info(**kwargs)
else:
data = self.save_widgets_bundle_using_post_with_http_info(**kwargs)
return data |
def save_widgets_bundle_using_post_with_http_info(self, **kwargs):
"Create Or Update Widget Bundle (saveWidgetsBundle) # noqa: E501\n\n Create or update the Widget Bundle. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. When creating the bundle, platform generates Widget Bundle Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Widget Bundle Id will be present in the response. Specify existing Widget Bundle id to update the Widget Bundle. Referencing non-existing Widget Bundle Id will cause 'Not Found' error. Widget Bundle alias is unique in the scope of tenant. Special Tenant Id '13814000-1dd2-11b2-8080-808080808080' is automatically used if the create bundle request is sent by user with 'SYS_ADMIN' authority. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.save_widgets_bundle_using_post_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param WidgetsBundle body:\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method save_widgets_bundle_using_post" % key))
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if ('body' in params):
body_params = params['body']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundle', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='WidgetsBundle', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | 6,562,157,225,668,929,000 | Create Or Update Widget Bundle (saveWidgetsBundle) # noqa: E501
Create or update the Widget Bundle. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. When creating the bundle, platform generates Widget Bundle Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Widget Bundle Id will be present in the response. Specify existing Widget Bundle id to update the Widget Bundle. Referencing non-existing Widget Bundle Id will cause 'Not Found' error. Widget Bundle alias is unique in the scope of tenant. Special Tenant Id '13814000-1dd2-11b2-8080-808080808080' is automatically used if the create bundle request is sent by user with 'SYS_ADMIN' authority. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_widgets_bundle_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param WidgetsBundle body:
:return: WidgetsBundle
If the method is called asynchronously,
returns the request thread. | tb_rest_client/api/api_pe/widgets_bundle_controller_api.py | save_widgets_bundle_using_post_with_http_info | D34DPlayer/thingsboard-python-rest-client | python | def save_widgets_bundle_using_post_with_http_info(self, **kwargs):
"Create Or Update Widget Bundle (saveWidgetsBundle) # noqa: E501\n\n Create or update the Widget Bundle. Widget Bundle represents a group(bundle) of widgets. Widgets are grouped into bundle by type or use case. When creating the bundle, platform generates Widget Bundle Id as [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Widget Bundle Id will be present in the response. Specify existing Widget Bundle id to update the Widget Bundle. Referencing non-existing Widget Bundle Id will cause 'Not Found' error. Widget Bundle alias is unique in the scope of tenant. Special Tenant Id '13814000-1dd2-11b2-8080-808080808080' is automatically used if the create bundle request is sent by user with 'SYS_ADMIN' authority. Available for users with 'SYS_ADMIN' or 'TENANT_ADMIN' authority. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.save_widgets_bundle_using_post_with_http_info(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param WidgetsBundle body:\n :return: WidgetsBundle\n If the method is called asynchronously,\n returns the request thread.\n "
all_params = ['body']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for (key, val) in six.iteritems(params['kwargs']):
if (key not in all_params):
raise TypeError(("Got an unexpected keyword argument '%s' to method save_widgets_bundle_using_post" % key))
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if ('body' in params):
body_params = params['body']
header_params['Accept'] = self.api_client.select_header_accept(['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(['application/json'])
auth_settings = ['X-Authorization']
return self.api_client.call_api('/api/widgetsBundle', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='WidgetsBundle', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) |
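
Per the create-or-update semantics in the docstring above, omitting the id creates a bundle, while sending the returned object back updates it. A sketch, assuming the generated WidgetsBundle model is importable from tb_rest_client.models.models_pe:

from tb_rest_client.models.models_pe import WidgetsBundle

created = api.save_widgets_bundle_using_post(body=WidgetsBundle(title='My widgets'))
created.title = 'My widgets (renamed)'
api.save_widgets_bundle_using_post(body=created)   # id is set, so this updates
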
def extract_type_tens(expression, component):
    '\n Extract from a ``TensExpr`` all tensors with `component`.\n\n Returns two tensor expressions:\n\n * the first contains all ``Tensor`` objects having `component`.\n * the second contains all remaining.\n\n\n '
if isinstance(expression, Tensor):
sp = [expression]
elif isinstance(expression, TensMul):
sp = expression.args
else:
raise ValueError('wrong type')
new_expr = S.One
residual_expr = S.One
for i in sp:
if (isinstance(i, Tensor) and (i.component == component)):
new_expr *= i
else:
residual_expr *= i
return (new_expr, residual_expr) | 803,409,615,657,882,500 | Extract from a ``TensExpr`` all tensors with `component`.
Returns two tensor expressions:
* the first contains all ``Tensor`` objects having `component`.
* the second contains all remaining. | venv/lib/python3.7/site-packages/sympy/physics/hep/gamma_matrices.py | extract_type_tens | CatTiger/vnpy | python | def extract_type_tens(expression, component):
    '\n Extract from a ``TensExpr`` all tensors with `component`.\n\n Returns two tensor expressions:\n\n * the first contains all ``Tensor`` objects having `component`.\n * the second contains all remaining.\n\n\n '
if isinstance(expression, Tensor):
sp = [expression]
elif isinstance(expression, TensMul):
sp = expression.args
else:
raise ValueError('wrong type')
new_expr = S.One
residual_expr = S.One
for i in sp:
if (isinstance(i, Tensor) and (i.component == component)):
new_expr *= i
else:
residual_expr *= i
return (new_expr, residual_expr) |
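
A small illustrative use of extract_type_tens, splitting a product into its GammaMatrix factors and the remaining tensors (the indices and tensor heads are chosen arbitrarily):

from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, extract_type_tens
from sympy.tensor.tensor import tensor_indices, tensor_heads

i0, i1 = tensor_indices('i0, i1', LorentzIndex)
p, q = tensor_heads('p, q', [LorentzIndex])
gammas, rest = extract_type_tens(G(i0) * G(i1) * p(-i0), GammaMatrix)
# gammas collects the GammaMatrix tensors; rest holds the remaining factors.
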
def simplify_gpgp(ex, sort=True):
"\n simplify products ``G(i)*p(-i)*G(j)*p(-j) -> p(i)*p(-i)``\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, simplify_gpgp\n >>> from sympy.tensor.tensor import tensor_indices, tensor_heads\n >>> p, q = tensor_heads('p, q', [LorentzIndex])\n >>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)\n >>> ps = p(i0)*G(-i0)\n >>> qs = q(i0)*G(-i0)\n >>> simplify_gpgp(ps*qs*qs)\n GammaMatrix(-L_0)*p(L_0)*q(L_1)*q(-L_1)\n "
def _simplify_gpgp(ex):
components = ex.components
a = []
comp_map = []
for (i, comp) in enumerate(components):
comp_map.extend(([i] * comp.rank))
dum = [(i[0], i[1], comp_map[i[0]], comp_map[i[1]]) for i in ex.dum]
for i in range(len(components)):
if (components[i] != GammaMatrix):
continue
for dx in dum:
if (dx[2] == i):
p_pos1 = dx[3]
elif (dx[3] == i):
p_pos1 = dx[2]
else:
continue
comp1 = components[p_pos1]
if ((comp1.comm == 0) and (comp1.rank == 1)):
a.append((i, p_pos1))
if (not a):
return ex
elim = set()
tv = []
hit = True
coeff = S.One
ta = None
while hit:
hit = False
for (i, ai) in enumerate(a[:(- 1)]):
if (ai[0] in elim):
continue
if (ai[0] != (a[(i + 1)][0] - 1)):
continue
if (components[ai[1]] != components[a[(i + 1)][1]]):
continue
elim.add(ai[0])
elim.add(ai[1])
elim.add(a[(i + 1)][0])
elim.add(a[(i + 1)][1])
if (not ta):
ta = ex.split()
mu = TensorIndex('mu', LorentzIndex)
hit = True
if (i == 0):
coeff = ex.coeff
tx = (components[ai[1]](mu) * components[ai[1]]((- mu)))
if (len(a) == 2):
tx *= 4
tv.append(tx)
break
if tv:
a = [x for (j, x) in enumerate(ta) if (j not in elim)]
a.extend(tv)
t = (tensor_mul(*a) * coeff)
return t
else:
return ex
if sort:
ex = ex.sorted_components()
while 1:
t = _simplify_gpgp(ex)
if (t != ex):
ex = t
else:
return t | -4,643,742,040,632,448,000 | simplify products ``G(i)*p(-i)*G(j)*p(-j) -> p(i)*p(-i)``
Examples
========
>>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, simplify_gpgp
>>> from sympy.tensor.tensor import tensor_indices, tensor_heads
>>> p, q = tensor_heads('p, q', [LorentzIndex])
>>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)
>>> ps = p(i0)*G(-i0)
>>> qs = q(i0)*G(-i0)
>>> simplify_gpgp(ps*qs*qs)
GammaMatrix(-L_0)*p(L_0)*q(L_1)*q(-L_1) | venv/lib/python3.7/site-packages/sympy/physics/hep/gamma_matrices.py | simplify_gpgp | CatTiger/vnpy | python | def simplify_gpgp(ex, sort=True):
"\n simplify products ``G(i)*p(-i)*G(j)*p(-j) -> p(i)*p(-i)``\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, simplify_gpgp\n >>> from sympy.tensor.tensor import tensor_indices, tensor_heads\n >>> p, q = tensor_heads('p, q', [LorentzIndex])\n >>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)\n >>> ps = p(i0)*G(-i0)\n >>> qs = q(i0)*G(-i0)\n >>> simplify_gpgp(ps*qs*qs)\n GammaMatrix(-L_0)*p(L_0)*q(L_1)*q(-L_1)\n "
def _simplify_gpgp(ex):
components = ex.components
a = []
comp_map = []
for (i, comp) in enumerate(components):
comp_map.extend(([i] * comp.rank))
dum = [(i[0], i[1], comp_map[i[0]], comp_map[i[1]]) for i in ex.dum]
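        # Each entry of dum is (slot1, slot2, component_of_slot1, component_of_slot2);
        # the loop below pairs each gamma matrix with the commuting rank-1 tensor
        # (a momentum) it is contracted with.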
for i in range(len(components)):
if (components[i] != GammaMatrix):
continue
for dx in dum:
if (dx[2] == i):
p_pos1 = dx[3]
elif (dx[3] == i):
p_pos1 = dx[2]
else:
continue
comp1 = components[p_pos1]
if ((comp1.comm == 0) and (comp1.rank == 1)):
a.append((i, p_pos1))
if (not a):
return ex
elim = set()
tv = []
hit = True
coeff = S.One
ta = None
while hit:
hit = False
for (i, ai) in enumerate(a[:(- 1)]):
if (ai[0] in elim):
continue
if (ai[0] != (a[(i + 1)][0] - 1)):
continue
if (components[ai[1]] != components[a[(i + 1)][1]]):
continue
elim.add(ai[0])
elim.add(ai[1])
elim.add(a[(i + 1)][0])
elim.add(a[(i + 1)][1])
if (not ta):
ta = ex.split()
mu = TensorIndex('mu', LorentzIndex)
hit = True
if (i == 0):
coeff = ex.coeff
tx = (components[ai[1]](mu) * components[ai[1]]((- mu)))
if (len(a) == 2):
tx *= 4
tv.append(tx)
break
if tv:
a = [x for (j, x) in enumerate(ta) if (j not in elim)]
a.extend(tv)
t = (tensor_mul(*a) * coeff)
return t
else:
return ex
if sort:
ex = ex.sorted_components()
while 1:
t = _simplify_gpgp(ex)
if (t != ex):
ex = t
else:
return t |
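Annotation: the reduction implemented above rests on the Clifford relation \{\gamma^{\mu}, \gamma^{\nu}\} = 2 g^{\mu\nu}. In standard notation,

\gamma^{\mu}\gamma^{\nu}\,p_{\mu}p_{\nu}
  = \tfrac{1}{2}\{\gamma^{\mu},\gamma^{\nu}\}\,p_{\mu}p_{\nu}
  = g^{\mu\nu}\,p_{\mu}p_{\nu}\,\mathbf{1}
  = p^{\mu}p_{\mu}\,\mathbf{1},

since p_{\mu}p_{\nu} is symmetric in \mu, \nu and the antisymmetric part of \gamma^{\mu}\gamma^{\nu} drops out of the contraction.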
def gamma_trace(t):
"\n trace of a single line of gamma matrices\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, gamma_trace, LorentzIndex\n >>> from sympy.tensor.tensor import tensor_indices, tensor_heads\n >>> p, q = tensor_heads('p, q', [LorentzIndex])\n >>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)\n >>> ps = p(i0)*G(-i0)\n >>> qs = q(i0)*G(-i0)\n >>> gamma_trace(G(i0)*G(i1))\n 4*metric(i0, i1)\n >>> gamma_trace(ps*ps) - 4*p(i0)*p(-i0)\n 0\n >>> gamma_trace(ps*qs + ps*ps) - 4*p(i0)*p(-i0) - 4*p(i0)*q(-i0)\n 0\n\n "
if isinstance(t, TensAdd):
res = TensAdd(*[_trace_single_line(x) for x in t.args])
return res
t = _simplify_single_line(t)
res = _trace_single_line(t)
return res | 1,445,378,646,411,838,500 | trace of a single line of gamma matrices
Examples
========
>>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, gamma_trace, LorentzIndex
>>> from sympy.tensor.tensor import tensor_indices, tensor_heads
>>> p, q = tensor_heads('p, q', [LorentzIndex])
>>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)
>>> ps = p(i0)*G(-i0)
>>> qs = q(i0)*G(-i0)
>>> gamma_trace(G(i0)*G(i1))
4*metric(i0, i1)
>>> gamma_trace(ps*ps) - 4*p(i0)*p(-i0)
0
>>> gamma_trace(ps*qs + ps*ps) - 4*p(i0)*p(-i0) - 4*p(i0)*q(-i0)
0 | venv/lib/python3.7/site-packages/sympy/physics/hep/gamma_matrices.py | gamma_trace | CatTiger/vnpy | python | def gamma_trace(t):
"\n trace of a single line of gamma matrices\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, gamma_trace, LorentzIndex\n >>> from sympy.tensor.tensor import tensor_indices, tensor_heads\n >>> p, q = tensor_heads('p, q', [LorentzIndex])\n >>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)\n >>> ps = p(i0)*G(-i0)\n >>> qs = q(i0)*G(-i0)\n >>> gamma_trace(G(i0)*G(i1))\n 4*metric(i0, i1)\n >>> gamma_trace(ps*ps) - 4*p(i0)*p(-i0)\n 0\n >>> gamma_trace(ps*qs + ps*ps) - 4*p(i0)*p(-i0) - 4*p(i0)*q(-i0)\n 0\n\n "
if isinstance(t, TensAdd):
res = TensAdd(*[_trace_single_line(x) for x in t.args])
return res
t = _simplify_single_line(t)
res = _trace_single_line(t)
return res |
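Annotation: a short sketch checking the standard four-gamma trace identity against gamma_trace. It assumes a stock sympy environment; the index names are illustrative, and the final comment states the expected result rather than verified output.

from sympy.physics.hep.gamma_matrices import GammaMatrix as G, gamma_trace, LorentzIndex
from sympy.tensor.tensor import tensor_indices

g = LorentzIndex.metric
i0, i1, i2, i3 = tensor_indices('i0:4', LorentzIndex)

# Tr(G^a G^b G^c G^d) = 4*(g^{ab} g^{cd} - g^{ac} g^{bd} + g^{ad} g^{bc})
t = gamma_trace(G(i0)*G(i1)*G(i2)*G(i3))
expected = 4*(g(i0, i1)*g(i2, i3) - g(i0, i2)*g(i1, i3) + g(i0, i3)*g(i1, i2))
# t - expected should cancel to zero after canonicalization.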
def _simplify_single_line(expression):
"\n Simplify single-line product of gamma matrices.\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, _simplify_single_line\n >>> from sympy.tensor.tensor import tensor_indices, TensorHead\n >>> p = TensorHead('p', [LorentzIndex])\n >>> i0,i1 = tensor_indices('i0:2', LorentzIndex)\n >>> _simplify_single_line(G(i0)*G(i1)*p(-i1)*G(-i0)) + 2*G(i0)*p(-i0)\n 0\n\n "
(t1, t2) = extract_type_tens(expression, GammaMatrix)
if (t1 != 1):
t1 = kahane_simplify(t1)
res = (t1 * t2)
return res | -2,924,012,100,022,340,600 | Simplify single-line product of gamma matrices.
Examples
========
>>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, _simplify_single_line
>>> from sympy.tensor.tensor import tensor_indices, TensorHead
>>> p = TensorHead('p', [LorentzIndex])
>>> i0,i1 = tensor_indices('i0:2', LorentzIndex)
>>> _simplify_single_line(G(i0)*G(i1)*p(-i1)*G(-i0)) + 2*G(i0)*p(-i0)
0 | venv/lib/python3.7/site-packages/sympy/physics/hep/gamma_matrices.py | _simplify_single_line | CatTiger/vnpy | python | def _simplify_single_line(expression):
"\n Simplify single-line product of gamma matrices.\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, _simplify_single_line\n >>> from sympy.tensor.tensor import tensor_indices, TensorHead\n >>> p = TensorHead('p', [LorentzIndex])\n >>> i0,i1 = tensor_indices('i0:2', LorentzIndex)\n >>> _simplify_single_line(G(i0)*G(i1)*p(-i1)*G(-i0)) + 2*G(i0)*p(-i0)\n 0\n\n "
(t1, t2) = extract_type_tens(expression, GammaMatrix)
if (t1 != 1):
t1 = kahane_simplify(t1)
res = (t1 * t2)
return res |
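Annotation: kahane_simplify, invoked above, repeatedly applies the four-dimensional contraction identities (standard results):

\gamma^{\mu}\gamma_{\mu} = 4\,\mathbf{1}, \qquad
\gamma^{\mu}\gamma^{\nu}\gamma_{\mu} = -2\,\gamma^{\nu}, \qquad
\gamma^{\mu}\gamma^{\nu}\gamma^{\rho}\gamma_{\mu} = 4\,g^{\nu\rho}\,\mathbf{1}, \qquad
\gamma^{\mu}\gamma^{\nu}\gamma^{\rho}\gamma^{\sigma}\gamma_{\mu} = -2\,\gamma^{\sigma}\gamma^{\rho}\gamma^{\nu}.

The doctest in the row above is the second identity contracted with p(-i1).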
def _trace_single_line(t):
"\n Evaluate the trace of a single gamma matrix line inside a ``TensExpr``.\n\n Notes\n =====\n\n If there are ``DiracSpinorIndex.auto_left`` and ``DiracSpinorIndex.auto_right``\n indices trace over them; otherwise traces are not implied (explain)\n\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, _trace_single_line\n >>> from sympy.tensor.tensor import tensor_indices, TensorHead\n >>> p = TensorHead('p', [LorentzIndex])\n >>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)\n >>> _trace_single_line(G(i0)*G(i1))\n 4*metric(i0, i1)\n >>> _trace_single_line(G(i0)*p(-i0)*G(i1)*p(-i1)) - 4*p(i0)*p(-i0)\n 0\n\n "
def _trace_single_line1(t):
t = t.sorted_components()
components = t.components
ncomps = len(components)
g = LorentzIndex.metric
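        # Locate the first contiguous run components[i:j] of gamma matrices in
        # the sorted product; numG below is the length of that run.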
hit = 0
for i in range(ncomps):
if (components[i] == GammaMatrix):
hit = 1
break
for j in range((i + hit), ncomps):
if (components[j] != GammaMatrix):
break
else:
j = ncomps
numG = (j - i)
if (numG == 0):
tcoeff = t.coeff
return (t.nocoeff if tcoeff else t)
if ((numG % 2) == 1):
return TensMul.from_data(S.Zero, [], [], [])
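        # An odd number of gammas traces to zero (handled above); runs of two or
        # four go straight to _gamma_trace1 in the else branch, and longer runs
        # peel off the first gamma via the trace recursion quoted after this row.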
elif (numG > 4):
a = t.split()
ind1 = a[i].get_indices()[0]
ind2 = a[(i + 1)].get_indices()[0]
aa = (a[:i] + a[(i + 2):])
t1 = (tensor_mul(*aa) * g(ind1, ind2))
t1 = t1.contract_metric(g)
args = [t1]
sign = 1
for k in range((i + 2), j):
sign = (- sign)
ind2 = a[k].get_indices()[0]
aa = ((a[:i] + a[(i + 1):k]) + a[(k + 1):])
t2 = ((sign * tensor_mul(*aa)) * g(ind1, ind2))
t2 = t2.contract_metric(g)
t2 = simplify_gpgp(t2, False)
args.append(t2)
t3 = TensAdd(*args)
t3 = _trace_single_line(t3)
return t3
else:
a = t.split()
t1 = _gamma_trace1(*a[i:j])
a2 = (a[:i] + a[j:])
t2 = tensor_mul(*a2)
t3 = (t1 * t2)
if (not t3):
return t3
t3 = t3.contract_metric(g)
return t3
t = t.expand()
if isinstance(t, TensAdd):
a = [(_trace_single_line1(x) * x.coeff) for x in t.args]
return TensAdd(*a)
elif isinstance(t, (Tensor, TensMul)):
r = (t.coeff * _trace_single_line1(t))
return r
else:
return trace(t) | -5,840,871,638,110,041,000 | Evaluate the trace of a single gamma matrix line inside a ``TensExpr``.
Notes
=====
If there are ``DiracSpinorIndex.auto_left`` and ``DiracSpinorIndex.auto_right``
indices, trace over them; otherwise traces are not implied.
Examples
========
>>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, _trace_single_line
>>> from sympy.tensor.tensor import tensor_indices, TensorHead
>>> p = TensorHead('p', [LorentzIndex])
>>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)
>>> _trace_single_line(G(i0)*G(i1))
4*metric(i0, i1)
>>> _trace_single_line(G(i0)*p(-i0)*G(i1)*p(-i1)) - 4*p(i0)*p(-i0)
0 | venv/lib/python3.7/site-packages/sympy/physics/hep/gamma_matrices.py | _trace_single_line | CatTiger/vnpy | python | def _trace_single_line(t):
"\n Evaluate the trace of a single gamma matrix line inside a ``TensExpr``.\n\n Notes\n =====\n\n If there are ``DiracSpinorIndex.auto_left`` and ``DiracSpinorIndex.auto_right``\n indices trace over them; otherwise traces are not implied (explain)\n\n\n Examples\n ========\n\n >>> from sympy.physics.hep.gamma_matrices import GammaMatrix as G, LorentzIndex, _trace_single_line\n >>> from sympy.tensor.tensor import tensor_indices, TensorHead\n >>> p = TensorHead('p', [LorentzIndex])\n >>> i0,i1,i2,i3,i4,i5 = tensor_indices('i0:6', LorentzIndex)\n >>> _trace_single_line(G(i0)*G(i1))\n 4*metric(i0, i1)\n >>> _trace_single_line(G(i0)*p(-i0)*G(i1)*p(-i1)) - 4*p(i0)*p(-i0)\n 0\n\n "
def _trace_single_line1(t):
t = t.sorted_components()
components = t.components
ncomps = len(components)
g = LorentzIndex.metric
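        # Locate the first contiguous run components[i:j] of gamma matrices in
        # the sorted product; numG below is the length of that run.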
hit = 0
for i in range(ncomps):
if (components[i] == GammaMatrix):
hit = 1
break
for j in range((i + hit), ncomps):
if (components[j] != GammaMatrix):
break
else:
j = ncomps
numG = (j - i)
if (numG == 0):
tcoeff = t.coeff
return (t.nocoeff if tcoeff else t)
if ((numG % 2) == 1):
return TensMul.from_data(S.Zero, [], [], [])
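        # An odd number of gammas traces to zero (handled above); runs of two or
        # four go straight to _gamma_trace1 in the else branch, and longer runs
        # peel off the first gamma via the trace recursion quoted after this row.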
elif (numG > 4):
a = t.split()
ind1 = a[i].get_indices()[0]
ind2 = a[(i + 1)].get_indices()[0]
aa = (a[:i] + a[(i + 2):])
t1 = (tensor_mul(*aa) * g(ind1, ind2))
t1 = t1.contract_metric(g)
args = [t1]
sign = 1
for k in range((i + 2), j):
sign = (- sign)
ind2 = a[k].get_indices()[0]
aa = ((a[:i] + a[(i + 1):k]) + a[(k + 1):])
t2 = ((sign * tensor_mul(*aa)) * g(ind1, ind2))
t2 = t2.contract_metric(g)
t2 = simplify_gpgp(t2, False)
args.append(t2)
t3 = TensAdd(*args)
t3 = _trace_single_line(t3)
return t3
else:
a = t.split()
t1 = _gamma_trace1(*a[i:j])
a2 = (a[:i] + a[j:])
t2 = tensor_mul(*a2)
t3 = (t1 * t2)
if (not t3):
return t3
t3 = t3.contract_metric(g)
return t3
t = t.expand()
if isinstance(t, TensAdd):
a = [(_trace_single_line1(x) * x.coeff) for x in t.args]
return TensAdd(*a)
elif isinstance(t, (Tensor, TensMul)):
r = (t.coeff * _trace_single_line1(t))
return r
else:
return trace(t) |
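Annotation: for runs of more than four matrices, the numG > 4 branch above implements the standard trace recursion (the alternating sign corresponds to the sign = -sign loop):

\operatorname{Tr}\!\left(\gamma^{\mu_1}\gamma^{\mu_2}\cdots\gamma^{\mu_{2n}}\right)
  = \sum_{k=2}^{2n} (-1)^{k}\, g^{\mu_1\mu_k}\,
    \operatorname{Tr}\!\left(\gamma^{\mu_2}\cdots\widehat{\gamma^{\mu_k}}\cdots\gamma^{\mu_{2n}}\right),

where the hat marks the omitted factor; the recursion bottoms out at \operatorname{Tr}(\gamma^{\mu}\gamma^{\nu}) = 4\,g^{\mu\nu} and \operatorname{Tr}(\mathbf{1}) = 4.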