body (string, 26-98.2k chars) | body_hash (int64, -9,222,864,604,528,158,000 to 9,221,803,474B) | docstring (string, 1-16.8k chars) | path (string, 5-230 chars) | name (string, 1-96 chars) | repository_name (string, 7-89 chars) | lang (string, 1 class) | body_without_docstring (string, 20-98.2k chars) |
---|---|---|---|---|---|---|---|
@classmethod
def get_name(cls):
" Default implementation of a function that returns a class's name. "
if cls.ACCESSORY_NAME:
return cls.ACCESSORY_NAME
return cls.__name__ | 6,766,631,347,145,509,000 | Default implementation of a function that returns a class's name. | binho/accessory.py | get_name | binhollc/binho-python-package | python | @classmethod
def get_name(cls):
" "
if cls.ACCESSORY_NAME:
return cls.ACCESSORY_NAME
return cls.__name__ |
@classmethod
def available_accessories(cls):
' Returns a list of available neighbors. '
return [accessory.get_name() for accessory in cls.__subclasses__()] | 6,202,222,336,773,028,000 | Returns a list of available neighbors. | binho/accessory.py | available_accessories | binhollc/binho-python-package | python | @classmethod
def available_accessories(cls):
' '
return [accessory.get_name() for accessory in cls.__subclasses__()] |
@classmethod
def from_name(cls, name, board, *args, **kwargs):
' Creates a new binhoAccessory object from its name. '
target_name = name.lower()
for subclass in cls.__subclasses__():
subclass_name = subclass.get_name()
if (target_name == subclass_name.lower()):
return subclass(board, *args, **kwargs)
raise DriverCapabilityError("No known driver for accessory '{}'.".format(name)) | 298,761,833,693,009,300 | Creates a new binhoAccessory object from its name. | binho/accessory.py | from_name | binhollc/binho-python-package | python | @classmethod
def from_name(cls, name, board, *args, **kwargs):
' '
target_name = name.lower()
for subclass in cls.__subclasses__():
subclass_name = subclass.get_name()
if (target_name == subclass_name.lower()):
return subclass(board, *args, **kwargs)
raise DriverCapabilityError("No known driver for accessory '{}'.".format(name)) |
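The `from_name` sample above is a registry-free factory: it scans `cls.__subclasses__()` and matches `get_name()` case-insensitively. Below is a minimal, self-contained sketch of the same pattern; the `Accessory` and `Oled` class names are made up for illustration, and only the lookup logic mirrors the sample.

```python
# Minimal sketch of the name-based subclass factory shown above.
# Class names and the accessory name are illustrative, not from the dataset.
class Accessory:
    ACCESSORY_NAME = None

    @classmethod
    def get_name(cls):
        # Fall back to the class name when no explicit name is set.
        return cls.ACCESSORY_NAME or cls.__name__

    @classmethod
    def from_name(cls, name, *args, **kwargs):
        target = name.lower()
        for subclass in cls.__subclasses__():
            if subclass.get_name().lower() == target:
                return subclass(*args, **kwargs)
        raise ValueError("No known driver for accessory '{}'.".format(name))


class Oled(Accessory):
    ACCESSORY_NAME = "OLED"


print(type(Accessory.from_name("oled")).__name__)  # -> Oled
```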
def testAdminPagerDutyNotification(self):
'Test AdminPagerDutyNotification'
pass | 682,979,602,889,604,600 | Test AdminPagerDutyNotification | gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_pager_duty_notification.py | testAdminPagerDutyNotification | EngHabu/flyteidl | python | def testAdminPagerDutyNotification(self):
pass |
def update(self, surface):
'Controla los eventos y coliciones de los sprites Palabras'
if ((not self.getClick()) and (not self.collide)):
self.rect.center = (self.posX, self.posY)
if self.getClick():
self.rect.center = pygame.mouse.get_pos()
if self.collide:
x = self.image.get_rect().size[0]
y = self.image.get_rect().size[1]
self.colli(x, y)
if (self.image.get_rect().size[0] <= 20):
self.rect.center = (0, 0)
surface.blit(self.getPalabraImagen(), self.getRect()) | 8,761,424,404,736,994,000 | Controla los eventos y coliciones de los sprites Palabras | Clases/Palabras.py | update | JorgeSchelotto/TrabajoFinalSeminarioPython | python | def update(self, surface):
if ((not self.getClick()) and (not self.collide)):
self.rect.center = (self.posX, self.posY)
if self.getClick():
self.rect.center = pygame.mouse.get_pos()
if self.collide:
x = self.image.get_rect().size[0]
y = self.image.get_rect().size[1]
self.colli(x, y)
if (self.image.get_rect().size[0] <= 20):
self.rect.center = (0, 0)
surface.blit(self.getPalabraImagen(), self.getRect()) |
def _set_axes_aspect_ratio(self, value):
"\n Set the aspect ratio of the axes in which the visualization is shown.\n This is a private method that is intended only for internal use, and it\n allows this viewer state class to adjust the limits accordingly when\n the aspect callback property is set to 'equal'\n "
self._axes_aspect_ratio = value
self._adjust_limits_aspect(aspect_adjustable='both') | 929,469,976,190,285,800 | Set the aspect ratio of the axes in which the visualization is shown.
This is a private method that is intended only for internal use, and it
allows this viewer state class to adjust the limits accordingly when
the aspect callback property is set to 'equal' | glue/viewers/matplotlib/state.py | _set_axes_aspect_ratio | cnheider/glue | python | def _set_axes_aspect_ratio(self, value):
"\n Set the aspect ratio of the axes in which the visualization is shown.\n This is a private method that is intended only for internal use, and it\n allows this viewer state class to adjust the limits accordingly when\n the aspect callback property is set to 'equal'\n "
self._axes_aspect_ratio = value
self._adjust_limits_aspect(aspect_adjustable='both') |
@avoid_circular
def _adjust_limits_aspect(self, *args, **kwargs):
'\n Adjust the limits of the visualization to take into account the aspect\n ratio. This only works if `_set_axes_aspect_ratio` has been called\n previously.\n '
if ((self.aspect == 'auto') or (self._axes_aspect_ratio is None)):
return
if ((self.x_min is None) or (self.x_max is None) or (self.y_min is None) or (self.y_max is None)):
return
aspect_adjustable = kwargs.pop('aspect_adjustable', 'auto')
changed = None
axes_ratio = self._axes_aspect_ratio
(x_min, x_max) = (self.x_min, self.x_max)
(y_min, y_max) = (self.y_min, self.y_max)
data_ratio = (abs((y_max - y_min)) / abs((x_max - x_min)))
if ((abs((data_ratio - axes_ratio)) / (0.5 * (data_ratio + axes_ratio))) > 0.01):
if (aspect_adjustable == 'both'):
x_mid = (0.5 * (x_min + x_max))
x_width = (abs((x_max - x_min)) * ((data_ratio / axes_ratio) ** 0.5))
y_mid = (0.5 * (y_min + y_max))
y_width = (abs((y_max - y_min)) / ((data_ratio / axes_ratio) ** 0.5))
x_min = (x_mid - (x_width / 2.0))
x_max = (x_mid + (x_width / 2.0))
y_min = (y_mid - (y_width / 2.0))
y_max = (y_mid + (y_width / 2.0))
elif (((aspect_adjustable == 'auto') and (data_ratio > axes_ratio)) or (aspect_adjustable == 'x')):
x_mid = (0.5 * (x_min + x_max))
x_width = (abs((y_max - y_min)) / axes_ratio)
x_min = (x_mid - (x_width / 2.0))
x_max = (x_mid + (x_width / 2.0))
else:
y_mid = (0.5 * (y_min + y_max))
y_width = (abs((x_max - x_min)) * axes_ratio)
y_min = (y_mid - (y_width / 2.0))
y_max = (y_mid + (y_width / 2.0))
with delay_callback(self, 'x_min', 'x_max', 'y_min', 'y_max'):
self.x_min = x_min
self.x_max = x_max
self.y_min = y_min
self.y_max = y_max | -8,841,196,313,293,820,000 | Adjust the limits of the visualization to take into account the aspect
ratio. This only works if `_set_axes_aspect_ratio` has been called
previously. | glue/viewers/matplotlib/state.py | _adjust_limits_aspect | cnheider/glue | python | @avoid_circular
def _adjust_limits_aspect(self, *args, **kwargs):
'\n Adjust the limits of the visualization to take into account the aspect\n ratio. This only works if `_set_axes_aspect_ratio` has been called\n previously.\n '
if ((self.aspect == 'auto') or (self._axes_aspect_ratio is None)):
return
if ((self.x_min is None) or (self.x_max is None) or (self.y_min is None) or (self.y_max is None)):
return
aspect_adjustable = kwargs.pop('aspect_adjustable', 'auto')
changed = None
axes_ratio = self._axes_aspect_ratio
(x_min, x_max) = (self.x_min, self.x_max)
(y_min, y_max) = (self.y_min, self.y_max)
data_ratio = (abs((y_max - y_min)) / abs((x_max - x_min)))
if ((abs((data_ratio - axes_ratio)) / (0.5 * (data_ratio + axes_ratio))) > 0.01):
if (aspect_adjustable == 'both'):
x_mid = (0.5 * (x_min + x_max))
x_width = (abs((x_max - x_min)) * ((data_ratio / axes_ratio) ** 0.5))
y_mid = (0.5 * (y_min + y_max))
y_width = (abs((y_max - y_min)) / ((data_ratio / axes_ratio) ** 0.5))
x_min = (x_mid - (x_width / 2.0))
x_max = (x_mid + (x_width / 2.0))
y_min = (y_mid - (y_width / 2.0))
y_max = (y_mid + (y_width / 2.0))
elif (((aspect_adjustable == 'auto') and (data_ratio > axes_ratio)) or (aspect_adjustable == 'x')):
x_mid = (0.5 * (x_min + x_max))
x_width = (abs((y_max - y_min)) / axes_ratio)
x_min = (x_mid - (x_width / 2.0))
x_max = (x_mid + (x_width / 2.0))
else:
y_mid = (0.5 * (y_min + y_max))
y_width = (abs((x_max - x_min)) * axes_ratio)
y_min = (y_mid - (y_width / 2.0))
y_max = (y_mid + (y_width / 2.0))
with delay_callback(self, 'x_min', 'x_max', 'y_min', 'y_max'):
self.x_min = x_min
self.x_max = x_max
self.y_min = y_min
self.y_max = y_max |
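The `_adjust_limits_aspect` sample above rescales the view limits so the data aspect ratio matches the axes aspect ratio. The following standalone sketch isolates its `'x'` branch: the x range is widened around its midpoint until (y range) / (x range) equals the axes ratio. The numbers are illustrative.

```python
# Standalone sketch of the 'x' branch above; values are made up.
def widen_x_limits(x_min, x_max, y_min, y_max, axes_ratio):
    x_mid = 0.5 * (x_min + x_max)
    x_width = abs(y_max - y_min) / axes_ratio
    return x_mid - x_width / 2.0, x_mid + x_width / 2.0


# Data spans 10 x 30 (data ratio 3.0) shown in square axes (ratio 1.0):
print(widen_x_limits(0.0, 10.0, 0.0, 30.0, 1.0))  # -> (-10.0, 20.0)
```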
def _swap_endian(val, length):
'\n Swap the endianness of a number\n '
if (length <= 8):
return val
if (length <= 16):
return (((val & 65280) >> 8) | ((val & 255) << 8))
if (length <= 32):
return (((((val & 4278190080) >> 24) | ((val & 16711680) >> 8)) | ((val & 65280) << 8)) | ((val & 255) << 24))
    raise Exception(('Cannot swap endianness for length ' + str(length)))
'\n \n '
if (length <= 8):
return val
if (length <= 16):
return (((val & 65280) >> 8) | ((val & 255) << 8))
if (length <= 32):
return (((((val & 4278190080) >> 24) | ((val & 16711680) >> 8)) | ((val & 65280) << 8)) | ((val & 255) << 24))
    raise Exception(('Cannot swap endianness for length ' + str(length))) |
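Assuming `_swap_endian` from the sample above is in scope, a quick check of the byte swap (note that the 12-bit register accesses elsewhere in this file fall into the 16-bit branch):

```python
# Illustrative values only; relies on _swap_endian() as defined above.
print(hex(_swap_endian(0x12, 8)))         # 0x12       (<= 8 bits: unchanged)
print(hex(_swap_endian(0x1234, 16)))      # 0x3412     (two bytes swapped)
print(hex(_swap_endian(0x12345678, 32)))  # 0x78563412 (four bytes reversed)
```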
def get_eeprom(self):
'\n If EEPROM is set, the saved voltage output will\n be loaded from power-on.\n\n '
byte_list = self.i2c.readfrom_mem(self.device_address, self.REGISTER_EEPROM, 1, addrsize=12)
val = 0
val = ((val << 8) | byte_list[0])
val = _swap_endian(val, 12)
return val | -7,357,650,423,980,757,000 | If EEPROM is set, the saved voltage output will
be loaded from power-on. | test/sampleData/micropython/MCP4725.py | get_eeprom | google/cyanobyte | python | def get_eeprom(self):
'\n If EEPROM is set, the saved voltage output will\n be loaded from power-on.\n\n '
byte_list = self.i2c.readfrom_mem(self.device_address, self.REGISTER_EEPROM, 1, addrsize=12)
val = 0
val = ((val << 8) | byte_list[0])
val = _swap_endian(val, 12)
return val |
def set_eeprom(self, data):
'\n If EEPROM is set, the saved voltage output will\n be loaded from power-on.\n\n '
data = _swap_endian(data, 12)
    buffer = bytearray(1)
    buffer[0] = ((data >> 0) & 255)
self.i2c.writeto_mem(self.device_address, self.REGISTER_EEPROM, buffer, addrsize=12) | 5,185,610,049,418,069,000 | If EEPROM is set, the saved voltage output will
be loaded from power-on. | test/sampleData/micropython/MCP4725.py | set_eeprom | google/cyanobyte | python | def set_eeprom(self, data):
'\n If EEPROM is set, the saved voltage output will\n be loaded from power-on.\n\n '
data = _swap_endian(data, 12)
    buffer = bytearray(1)
    buffer[0] = ((data >> 0) & 255)
self.i2c.writeto_mem(self.device_address, self.REGISTER_EEPROM, buffer, addrsize=12) |
def get_vout(self):
'\n VOut = (Vcc * value) / 4096\n The output is a range between 0 and Vcc with\n steps of Vcc/4096.\n In a 3.3v system, each step is 800 microvolts.\n\n '
byte_list = self.i2c.readfrom_mem(self.device_address, self.REGISTER_VOUT, 1, addrsize=12)
val = 0
val = ((val << 8) | byte_list[0])
val = _swap_endian(val, 12)
return val | -4,110,287,919,438,414,000 | VOut = (Vcc * value) / 4096
The output is a range between 0 and Vcc with
steps of Vcc/4096.
In a 3.3v system, each step is 800 microvolts. | test/sampleData/micropython/MCP4725.py | get_vout | google/cyanobyte | python | def get_vout(self):
'\n VOut = (Vcc * value) / 4096\n The output is a range between 0 and Vcc with\n steps of Vcc/4096.\n In a 3.3v system, each step is 800 microvolts.\n\n '
byte_list = self.i2c.readfrom_mem(self.device_address, self.REGISTER_VOUT, 1, addrsize=12)
val = 0
val = ((val << 8) | byte_list[0])
val = _swap_endian(val, 12)
return val |
def set_vout(self, data):
'\n VOut = (Vcc * value) / 4096\n The output is a range between 0 and Vcc with\n steps of Vcc/4096.\n In a 3.3v system, each step is 800 microvolts.\n\n '
data = _swap_endian(data, 12)
    buffer = bytearray(1)
    buffer[0] = ((data >> 0) & 255)
self.i2c.writeto_mem(self.device_address, self.REGISTER_VOUT, buffer, addrsize=12) | 1,188,569,324,987,678,500 | VOut = (Vcc * value) / 4096
The output is a range between 0 and Vcc with
steps of Vcc/4096.
In a 3.3v system, each step is 800 microvolts. | test/sampleData/micropython/MCP4725.py | set_vout | google/cyanobyte | python | def set_vout(self, data):
'\n VOut = (Vcc * value) / 4096\n The output is a range between 0 and Vcc with\n steps of Vcc/4096.\n In a 3.3v system, each step is 800 microvolts.\n\n '
data = _swap_endian(data, 12)
buffer = []
buffer[0] = ((data >> 0) & 255)
self.i2c.writeto_mem(self.device_address, self.REGISTER_VOUT, buffer, addrsize=12) |
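The docstrings above state the DAC relation VOut = (Vcc * value) / 4096 and quote a step of roughly 800 microvolts at 3.3 V. A quick arithmetic check (the 3.3 V supply is an assumption taken from the docstring):

```python
# Worked example of VOut = (Vcc * value) / 4096 for an assumed 3.3 V supply.
vcc = 3.3
step = vcc / 4096
print(round(step * 1e6, 1))         # 805.7 -> roughly 0.8 mV per code step
print(round(vcc * 2048 / 4096, 3))  # 1.65  -> mid-scale code gives Vcc / 2
```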
def get_digitalout(self):
'\n Only allows you to send fully on or off\n\n '
val = self.get_eeprom()
val = (val & 8191)
return val | -3,150,044,481,258,214,400 | Only allows you to send fully on or off | test/sampleData/micropython/MCP4725.py | get_digitalout | google/cyanobyte | python | def get_digitalout(self):
'\n \n\n '
val = self.get_eeprom()
val = (val & 8191)
return val |
def set_digitalout(self, data):
'\n Only allows you to send fully on or off\n\n '
register_data = self.get_eeprom()
register_data = (register_data | data)
self.set_eeprom(register_data) | -634,805,178,311,054,800 | Only allows you to send fully on or off | test/sampleData/micropython/MCP4725.py | set_digitalout | google/cyanobyte | python | def set_digitalout(self, data):
'\n \n\n '
register_data = self.get_eeprom()
register_data = (register_data | data)
self.set_eeprom(register_data) |
def getvout_asvoltage(self, vcc):
'\n get vout\n\n '
voltage = None
value = self.get_eeprom()
voltage = ((value / 4096) * vcc)
return voltage | 8,789,105,480,186,073,000 | get vout | test/sampleData/micropython/MCP4725.py | getvout_asvoltage | google/cyanobyte | python | def getvout_asvoltage(self, vcc):
'\n \n\n '
voltage = None
value = self.get_eeprom()
voltage = ((value / 4096) * vcc)
return voltage |
def setvout_asvoltage(self, output, vcc):
'\n set vout\n\n '
output = ((output / vcc) * 4096)
self.set_eeprom(output) | -534,523,613,088,087,300 | set vout | test/sampleData/micropython/MCP4725.py | setvout_asvoltage | google/cyanobyte | python | def setvout_asvoltage(self, output, vcc):
'\n \n\n '
output = ((output / vcc) * 4096)
self.set_eeprom(output) |
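`getvout_asvoltage` and `setvout_asvoltage` above convert between volts and the 12-bit register code. A self-contained sketch of that conversion; the helper names and values are made up for illustration.

```python
# Volts <-> 12-bit code conversion, mirroring the samples above (names are hypothetical).
def voltage_to_code(volts, vcc):
    return int((volts / vcc) * 4096)

def code_to_voltage(code, vcc):
    return (code / 4096) * vcc

code = voltage_to_code(1.65, 3.3)                   # -> 2048
print(code, round(code_to_voltage(code, 3.3), 2))   # 2048 1.65
```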
def entropy_distribution(signal=None, delay=1, dimension=3, bins='Sturges', base=2):
'**Distribution Entropy (DistrEn)**\n\n Distribution Entropy (**DistrEn**, more commonly known as **DistEn**).\n\n Parameters\n ----------\n signal : Union[list, np.array, pd.Series]\n The signal (i.e., a time series) in the form of a vector of values.\n delay : int\n Time delay (often denoted *Tau* :math:`\\tau`, sometimes referred to as *lag*) in samples.\n See :func:`complexity_delay` to estimate the optimal value for this parameter.\n dimension : int\n Embedding Dimension (*m*, sometimes referred to as *d* or *order*). See\n :func:`complexity_dimension` to estimate the optimal value for this parameter.\n bins : int or str\n Method to find the number of bins. Can be a number, or one of ``"Sturges"``, ``"Rice"``,\n ``"Doane"``, or ``"sqrt"``.\n base : int\n The logarithmic base to use for :func:`entropy_shannon`.\n\n Returns\n --------\n distren : float\n The Distance Entropy entropy of the signal.\n info : dict\n A dictionary containing additional information regarding the parameters used.\n\n See Also\n --------\n entropy_shannon\n\n Examples\n ----------\n .. ipython:: python\n\n import neurokit2 as nk\n\n signal = nk.signal_simulate(duration=2, frequency=5)\n\n distren, info = nk.entropy_distribution(signal)\n distren\n\n References\n -----------\n * Li, P., Liu, C., Li, K., Zheng, D., Liu, C., & Hou, Y. (2015). Assessing the complexity of\n short-term heartbeat interval series by distribution entropy. Medical & biological\n engineering & computing, 53(1), 77-87.\n\n '
if (isinstance(signal, (np.ndarray, pd.DataFrame)) and (signal.ndim > 1)):
raise ValueError('Multidimensional inputs (e.g., matrices or multichannel data) are not supported yet.')
info = {'Dimension': dimension, 'Delay': delay, 'Bins': bins}
embedded = complexity_embedding(signal, delay=delay, dimension=dimension)
n = len(embedded)
d = np.zeros(round(((n * (n - 1)) / 2)))
for k in range(1, n):
Ix = (int(((k - 1) * (n - (k / 2)))), int((k * (n - ((k + 1) / 2)))))
d[Ix[0]:Ix[1]] = np.max(abs((np.tile(embedded[(k - 1), :], ((n - k), 1)) - embedded[k:, :])), axis=1)
n_d = len(d)
if isinstance(bins, str):
bins = bins.lower()
if (bins == 'sturges'):
n_bins = np.ceil((np.log2(n_d) + 1))
elif (bins == 'rice'):
n_bins = np.ceil((2 * (n_d ** (1 / 3))))
elif (bins == 'sqrt'):
n_bins = np.ceil(np.sqrt(n_d))
elif (bins == 'doanes'):
sigma = np.sqrt(((6 * (n_d - 2)) / ((n_d + 1) * (n_d + 3))))
n_bins = np.ceil(((1 + np.log2(n_d)) + np.log2((1 + abs((scipy.stats.skew(d) / sigma))))))
else:
raise Exception('Please enter a valid binning method')
else:
n_bins = bins
(freq, _) = np.histogram(d, int(n_bins))
freq = (freq / freq.sum())
(distren, _) = entropy_shannon(freq=freq, base=base)
distren = (distren / (np.log(n_bins) / np.log(base)))
return (distren, info) | 3,542,943,845,490,031,600 | **Distribution Entropy (DistrEn)**
Distribution Entropy (**DistrEn**, more commonly known as **DistEn**).
Parameters
----------
signal : Union[list, np.array, pd.Series]
The signal (i.e., a time series) in the form of a vector of values.
delay : int
Time delay (often denoted *Tau* :math:`\tau`, sometimes referred to as *lag*) in samples.
See :func:`complexity_delay` to estimate the optimal value for this parameter.
dimension : int
Embedding Dimension (*m*, sometimes referred to as *d* or *order*). See
:func:`complexity_dimension` to estimate the optimal value for this parameter.
bins : int or str
Method to find the number of bins. Can be a number, or one of ``"Sturges"``, ``"Rice"``,
``"Doane"``, or ``"sqrt"``.
base : int
The logarithmic base to use for :func:`entropy_shannon`.
Returns
--------
distren : float
The Distance Entropy entropy of the signal.
info : dict
A dictionary containing additional information regarding the parameters used.
See Also
--------
entropy_shannon
Examples
----------
.. ipython:: python
import neurokit2 as nk
signal = nk.signal_simulate(duration=2, frequency=5)
distren, info = nk.entropy_distribution(signal)
distren
References
-----------
* Li, P., Liu, C., Li, K., Zheng, D., Liu, C., & Hou, Y. (2015). Assessing the complexity of
short-term heartbeat interval series by distribution entropy. Medical & biological
engineering & computing, 53(1), 77-87. | neurokit2/complexity/entropy_distribution.py | entropy_distribution | danibene/NeuroKit | python | def entropy_distribution(signal=None, delay=1, dimension=3, bins='Sturges', base=2):
'**Distribution Entropy (DistrEn)**\n\n Distribution Entropy (**DistrEn**, more commonly known as **DistEn**).\n\n Parameters\n ----------\n signal : Union[list, np.array, pd.Series]\n The signal (i.e., a time series) in the form of a vector of values.\n delay : int\n Time delay (often denoted *Tau* :math:`\\tau`, sometimes referred to as *lag*) in samples.\n See :func:`complexity_delay` to estimate the optimal value for this parameter.\n dimension : int\n Embedding Dimension (*m*, sometimes referred to as *d* or *order*). See\n :func:`complexity_dimension` to estimate the optimal value for this parameter.\n bins : int or str\n Method to find the number of bins. Can be a number, or one of ``"Sturges"``, ``"Rice"``,\n ``"Doane"``, or ``"sqrt"``.\n base : int\n The logarithmic base to use for :func:`entropy_shannon`.\n\n Returns\n --------\n distren : float\n The Distance Entropy entropy of the signal.\n info : dict\n A dictionary containing additional information regarding the parameters used.\n\n See Also\n --------\n entropy_shannon\n\n Examples\n ----------\n .. ipython:: python\n\n import neurokit2 as nk\n\n signal = nk.signal_simulate(duration=2, frequency=5)\n\n distren, info = nk.entropy_distribution(signal)\n distren\n\n References\n -----------\n * Li, P., Liu, C., Li, K., Zheng, D., Liu, C., & Hou, Y. (2015). Assessing the complexity of\n short-term heartbeat interval series by distribution entropy. Medical & biological\n engineering & computing, 53(1), 77-87.\n\n '
if (isinstance(signal, (np.ndarray, pd.DataFrame)) and (signal.ndim > 1)):
raise ValueError('Multidimensional inputs (e.g., matrices or multichannel data) are not supported yet.')
info = {'Dimension': dimension, 'Delay': delay, 'Bins': bins}
embedded = complexity_embedding(signal, delay=delay, dimension=dimension)
n = len(embedded)
d = np.zeros(round(((n * (n - 1)) / 2)))
for k in range(1, n):
Ix = (int(((k - 1) * (n - (k / 2)))), int((k * (n - ((k + 1) / 2)))))
d[Ix[0]:Ix[1]] = np.max(abs((np.tile(embedded[(k - 1), :], ((n - k), 1)) - embedded[k:, :])), axis=1)
n_d = len(d)
if isinstance(bins, str):
bins = bins.lower()
if (bins == 'sturges'):
n_bins = np.ceil((np.log2(n_d) + 1))
elif (bins == 'rice'):
n_bins = np.ceil((2 * (n_d ** (1 / 3))))
elif (bins == 'sqrt'):
n_bins = np.ceil(np.sqrt(n_d))
elif (bins == 'doanes'):
sigma = np.sqrt(((6 * (n_d - 2)) / ((n_d + 1) * (n_d + 3))))
n_bins = np.ceil(((1 + np.log2(n_d)) + np.log2((1 + abs((scipy.stats.skew(d) / sigma))))))
else:
raise Exception('Please enter a valid binning method')
else:
n_bins = bins
(freq, _) = np.histogram(d, int(n_bins))
freq = (freq / freq.sum())
(distren, _) = entropy_shannon(freq=freq, base=base)
distren = (distren / (np.log(n_bins) / np.log(base)))
return (distren, info) |
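The `bins` argument above selects a bin-count rule applied to the n_d pairwise distances (note the code matches the lowercase string 'doanes', while the docstring spells it "Doane"). A standalone sketch of the three simpler rules with a worked value:

```python
# Standalone sketch of the Sturges / Rice / sqrt bin-count rules used above.
import numpy as np

def n_bins(n_d, method="sturges"):
    if method == "sturges":
        return int(np.ceil(np.log2(n_d) + 1))
    if method == "rice":
        return int(np.ceil(2 * n_d ** (1 / 3)))
    if method == "sqrt":
        return int(np.ceil(np.sqrt(n_d)))
    raise ValueError(method)

# A 100-sample signal embedded with dimension=3, delay=1 gives 98 vectors and
# 98 * 97 / 2 = 4753 pairwise distances:
print(n_bins(4753))           # 14 (Sturges)
print(n_bins(4753, "sqrt"))   # 69
```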
def get_db():
'Opens a new database connection if there is none yet for the\n current application context.\n '
if (not hasattr(g, 'db')):
g.db = DatabaseConnection(os.getenv('TOBY_DB_USER', 'toby'), os.environ['TOBY_DB_PASSWORD'])
return g.db | 2,712,110,794,052,895,000 | Opens a new database connection if there is none yet for the
current application context. | toby.py | get_db | axxiao/toby | python | def get_db():
'Opens a new database connection if there is none yet for the\n current application context.\n '
if (not hasattr(g, 'db')):
g.db = DatabaseConnection(os.getenv('TOBY_DB_USER', 'toby'), os.environ['TOBY_DB_PASSWORD'])
return g.db |
@app.teardown_appcontext
def close_db(error):
'Closes the database again at the end of the request.'
if hasattr(g, 'db'):
g.db.disconnect()
if error:
logger.error(('Database connection closed because of :' + str(error))) | -4,155,511,414,969,864,000 | Closes the database again at the end of the request. | toby.py | close_db | axxiao/toby | python | @app.teardown_appcontext
def close_db(error):
if hasattr(g, 'db'):
g.db.disconnect()
if error:
logger.error(('Database connection closed because of :' + str(error))) |
def get_transport_class(cls, label: str=None) -> Type[FeedServiceTransport]:
'Returns an appropriate transport class.\n\n Args:\n label: The name of the desired transport. If none is\n provided, then the first transport in the registry is used.\n\n Returns:\n The transport class to use.\n '
if label:
return cls._transport_registry[label]
return next(iter(cls._transport_registry.values())) | 3,256,379,998,317,467,000 | Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use. | google/ads/googleads/v10/services/services/feed_service/client.py | get_transport_class | JakobSteixner/google-ads-python | python | def get_transport_class(cls, label: str=None) -> Type[FeedServiceTransport]:
'Returns an appropriate transport class.\n\n Args:\n label: The name of the desired transport. If none is\n provided, then the first transport in the registry is used.\n\n Returns:\n The transport class to use.\n '
if label:
return cls._transport_registry[label]
return next(iter(cls._transport_registry.values())) |
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
'Converts api endpoint to mTLS endpoint.\n\n Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to\n "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.\n Args:\n api_endpoint (Optional[str]): the api endpoint to convert.\n Returns:\n str: converted mTLS api endpoint.\n '
if (not api_endpoint):
return api_endpoint
mtls_endpoint_re = re.compile('(?P<name>[^.]+)(?P<mtls>\\.mtls)?(?P<sandbox>\\.sandbox)?(?P<googledomain>\\.googleapis\\.com)?')
m = mtls_endpoint_re.match(api_endpoint)
(name, mtls, sandbox, googledomain) = m.groups()
if (mtls or (not googledomain)):
return api_endpoint
if sandbox:
return api_endpoint.replace('sandbox.googleapis.com', 'mtls.sandbox.googleapis.com')
return api_endpoint.replace('.googleapis.com', '.mtls.googleapis.com') | 7,533,698,565,164,944,000 | Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint. | google/ads/googleads/v10/services/services/feed_service/client.py | _get_default_mtls_endpoint | JakobSteixner/google-ads-python | python | @staticmethod
def _get_default_mtls_endpoint(api_endpoint):
'Converts api endpoint to mTLS endpoint.\n\n Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to\n "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.\n Args:\n api_endpoint (Optional[str]): the api endpoint to convert.\n Returns:\n str: converted mTLS api endpoint.\n '
if (not api_endpoint):
return api_endpoint
mtls_endpoint_re = re.compile('(?P<name>[^.]+)(?P<mtls>\\.mtls)?(?P<sandbox>\\.sandbox)?(?P<googledomain>\\.googleapis\\.com)?')
m = mtls_endpoint_re.match(api_endpoint)
(name, mtls, sandbox, googledomain) = m.groups()
if (mtls or (not googledomain)):
return api_endpoint
if sandbox:
return api_endpoint.replace('sandbox.googleapis.com', 'mtls.sandbox.googleapis.com')
return api_endpoint.replace('.googleapis.com', '.mtls.googleapis.com') |
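A usage sketch for `_get_default_mtls_endpoint` above. It assumes the google-ads-python package these samples come from is installed; the import path is inferred from the sample's path column, and the hostnames are illustrative.

```python
# Illustrative endpoint rewrites; FeedServiceClient is the class shown above.
from google.ads.googleads.v10.services.services.feed_service.client import FeedServiceClient

print(FeedServiceClient._get_default_mtls_endpoint("googleads.googleapis.com"))
# -> googleads.mtls.googleapis.com
print(FeedServiceClient._get_default_mtls_endpoint("example.sandbox.googleapis.com"))
# -> example.mtls.sandbox.googleapis.com
print(FeedServiceClient._get_default_mtls_endpoint("localhost:7469"))
# -> localhost:7469 (non-googleapis hosts pass through unchanged)
```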
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
'Creates an instance of this client using the provided credentials\n info.\n\n Args:\n info (dict): The service account private key info.\n args: Additional arguments to pass to the constructor.\n kwargs: Additional arguments to pass to the constructor.\n\n Returns:\n FeedServiceClient: The constructed client.\n '
credentials = service_account.Credentials.from_service_account_info(info)
kwargs['credentials'] = credentials
return cls(*args, **kwargs) | 7,061,353,587,626,594,000 | Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeedServiceClient: The constructed client. | google/ads/googleads/v10/services/services/feed_service/client.py | from_service_account_info | JakobSteixner/google-ads-python | python | @classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
'Creates an instance of this client using the provided credentials\n info.\n\n Args:\n info (dict): The service account private key info.\n args: Additional arguments to pass to the constructor.\n kwargs: Additional arguments to pass to the constructor.\n\n Returns:\n FeedServiceClient: The constructed client.\n '
credentials = service_account.Credentials.from_service_account_info(info)
kwargs['credentials'] = credentials
return cls(*args, **kwargs) |
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
'Creates an instance of this client using the provided credentials\n file.\n\n Args:\n filename (str): The path to the service account private key json\n file.\n args: Additional arguments to pass to the constructor.\n kwargs: Additional arguments to pass to the constructor.\n\n Returns:\n FeedServiceClient: The constructed client.\n '
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs) | 7,600,690,789,200,093,000 | Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FeedServiceClient: The constructed client. | google/ads/googleads/v10/services/services/feed_service/client.py | from_service_account_file | JakobSteixner/google-ads-python | python | @classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
'Creates an instance of this client using the provided credentials\n file.\n\n Args:\n filename (str): The path to the service account private key json\n file.\n args: Additional arguments to pass to the constructor.\n kwargs: Additional arguments to pass to the constructor.\n\n Returns:\n FeedServiceClient: The constructed client.\n '
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs) |
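A minimal usage sketch of the two constructors above, assuming `FeedServiceClient` is imported as in the earlier sketch; the key-file path is a placeholder.

```python
# Construct the client from a service-account key file (placeholder path).
client = FeedServiceClient.from_service_account_file("service-account-key.json")
# ...or from an already-parsed JSON dict:
# client = FeedServiceClient.from_service_account_info(key_info_dict)
```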
@property
def transport(self) -> FeedServiceTransport:
'Returns the transport used by the client instance.\n\n Returns:\n FeedServiceTransport: The transport used by the client\n instance.\n '
return self._transport | 4,740,376,337,738,445,000 | Returns the transport used by the client instance.
Returns:
FeedServiceTransport: The transport used by the client
instance. | google/ads/googleads/v10/services/services/feed_service/client.py | transport | JakobSteixner/google-ads-python | python | @property
def transport(self) -> FeedServiceTransport:
'Returns the transport used by the client instance.\n\n Returns:\n FeedServiceTransport: The transport used by the client\n instance.\n '
return self._transport |
def __exit__(self, type, value, traceback):
"Releases underlying transport's resources.\n\n .. warning::\n ONLY use as a context manager if the transport is NOT shared\n with other clients! Exiting the with block will CLOSE the transport\n and may cause errors in other clients!\n "
self.transport.close() | 7,840,855,355,632,227,000 | Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients! | google/ads/googleads/v10/services/services/feed_service/client.py | __exit__ | JakobSteixner/google-ads-python | python | def __exit__(self, type, value, traceback):
"Releases underlying transport's resources.\n\n .. warning::\n ONLY use as a context manager if the transport is NOT shared\n with other clients! Exiting the with block will CLOSE the transport\n and may cause errors in other clients!\n "
self.transport.close() |
@staticmethod
def feed_path(customer_id: str, feed_id: str) -> str:
'Returns a fully-qualified feed string.'
return 'customers/{customer_id}/feeds/{feed_id}'.format(customer_id=customer_id, feed_id=feed_id) | -1,869,738,995,331,725,300 | Returns a fully-qualified feed string. | google/ads/googleads/v10/services/services/feed_service/client.py | feed_path | JakobSteixner/google-ads-python | python | @staticmethod
def feed_path(customer_id: str, feed_id: str) -> str:
return 'customers/{customer_id}/feeds/{feed_id}'.format(customer_id=customer_id, feed_id=feed_id) |
@staticmethod
def parse_feed_path(path: str) -> Dict[(str, str)]:
'Parses a feed path into its component segments.'
m = re.match('^customers/(?P<customer_id>.+?)/feeds/(?P<feed_id>.+?)$', path)
return (m.groupdict() if m else {}) | 4,855,498,160,379,343,000 | Parses a feed path into its component segments. | google/ads/googleads/v10/services/services/feed_service/client.py | parse_feed_path | JakobSteixner/google-ads-python | python | @staticmethod
def parse_feed_path(path: str) -> Dict[(str, str)]:
m = re.match('^customers/(?P<customer_id>.+?)/feeds/(?P<feed_id>.+?)$', path)
return (m.groupdict() if m else {}) |
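A round-trip check of the two path helpers above, again assuming `FeedServiceClient` is imported as in the earlier sketch; the IDs are made up.

```python
# Build and parse a feed resource name (illustrative IDs).
path = FeedServiceClient.feed_path("1234567890", "42")
print(path)                                  # customers/1234567890/feeds/42
print(FeedServiceClient.parse_feed_path(path))
# {'customer_id': '1234567890', 'feed_id': '42'}
```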
@staticmethod
def common_billing_account_path(billing_account: str) -> str:
'Returns a fully-qualified billing_account string.'
return 'billingAccounts/{billing_account}'.format(billing_account=billing_account) | 5,123,899,605,328,763,000 | Returns a fully-qualified billing_account string. | google/ads/googleads/v10/services/services/feed_service/client.py | common_billing_account_path | JakobSteixner/google-ads-python | python | @staticmethod
def common_billing_account_path(billing_account: str) -> str:
return 'billingAccounts/{billing_account}'.format(billing_account=billing_account) |
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[(str, str)]:
'Parse a billing_account path into its component segments.'
m = re.match('^billingAccounts/(?P<billing_account>.+?)$', path)
return (m.groupdict() if m else {}) | 3,539,036,522,285,068,000 | Parse a billing_account path into its component segments. | google/ads/googleads/v10/services/services/feed_service/client.py | parse_common_billing_account_path | JakobSteixner/google-ads-python | python | @staticmethod
def parse_common_billing_account_path(path: str) -> Dict[(str, str)]:
m = re.match('^billingAccounts/(?P<billing_account>.+?)$', path)
return (m.groupdict() if m else {}) |
@staticmethod
def common_folder_path(folder: str) -> str:
'Returns a fully-qualified folder string.'
return 'folders/{folder}'.format(folder=folder) | -6,142,497,583,881,718,000 | Returns a fully-qualified folder string. | google/ads/googleads/v10/services/services/feed_service/client.py | common_folder_path | JakobSteixner/google-ads-python | python | @staticmethod
def common_folder_path(folder: str) -> str:
return 'folders/{folder}'.format(folder=folder) |
@staticmethod
def parse_common_folder_path(path: str) -> Dict[(str, str)]:
'Parse a folder path into its component segments.'
m = re.match('^folders/(?P<folder>.+?)$', path)
return (m.groupdict() if m else {}) | 7,731,323,619,502,445,000 | Parse a folder path into its component segments. | google/ads/googleads/v10/services/services/feed_service/client.py | parse_common_folder_path | JakobSteixner/google-ads-python | python | @staticmethod
def parse_common_folder_path(path: str) -> Dict[(str, str)]:
m = re.match('^folders/(?P<folder>.+?)$', path)
return (m.groupdict() if m else {}) |
@staticmethod
def common_organization_path(organization: str) -> str:
'Returns a fully-qualified organization string.'
return 'organizations/{organization}'.format(organization=organization) | -1,733,580,681,013,462,000 | Returns a fully-qualified organization string. | google/ads/googleads/v10/services/services/feed_service/client.py | common_organization_path | JakobSteixner/google-ads-python | python | @staticmethod
def common_organization_path(organization: str) -> str:
return 'organizations/{organization}'.format(organization=organization) |
@staticmethod
def parse_common_organization_path(path: str) -> Dict[(str, str)]:
'Parse a organization path into its component segments.'
m = re.match('^organizations/(?P<organization>.+?)$', path)
return (m.groupdict() if m else {}) | 6,176,747,584,094,183,000 | Parse a organization path into its component segments. | google/ads/googleads/v10/services/services/feed_service/client.py | parse_common_organization_path | JakobSteixner/google-ads-python | python | @staticmethod
def parse_common_organization_path(path: str) -> Dict[(str, str)]:
m = re.match('^organizations/(?P<organization>.+?)$', path)
return (m.groupdict() if m else {}) |
@staticmethod
def common_project_path(project: str) -> str:
'Returns a fully-qualified project string.'
return 'projects/{project}'.format(project=project) | -124,327,816,620,303,040 | Returns a fully-qualified project string. | google/ads/googleads/v10/services/services/feed_service/client.py | common_project_path | JakobSteixner/google-ads-python | python | @staticmethod
def common_project_path(project: str) -> str:
return 'projects/{project}'.format(project=project) |
@staticmethod
def parse_common_project_path(path: str) -> Dict[(str, str)]:
'Parse a project path into its component segments.'
m = re.match('^projects/(?P<project>.+?)$', path)
return (m.groupdict() if m else {}) | -6,609,324,249,468,844,000 | Parse a project path into its component segments. | google/ads/googleads/v10/services/services/feed_service/client.py | parse_common_project_path | JakobSteixner/google-ads-python | python | @staticmethod
def parse_common_project_path(path: str) -> Dict[(str, str)]:
m = re.match('^projects/(?P<project>.+?)$', path)
return (m.groupdict() if m else {}) |
@staticmethod
def common_location_path(project: str, location: str) -> str:
'Returns a fully-qualified location string.'
return 'projects/{project}/locations/{location}'.format(project=project, location=location) | 8,215,176,652,370,049,000 | Returns a fully-qualified location string. | google/ads/googleads/v10/services/services/feed_service/client.py | common_location_path | JakobSteixner/google-ads-python | python | @staticmethod
def common_location_path(project: str, location: str) -> str:
return 'projects/{project}/locations/{location}'.format(project=project, location=location) |
@staticmethod
def parse_common_location_path(path: str) -> Dict[(str, str)]:
'Parse a location path into its component segments.'
m = re.match('^projects/(?P<project>.+?)/locations/(?P<location>.+?)$', path)
return (m.groupdict() if m else {}) | 1,703,235,435,027,079,400 | Parse a location path into its component segments. | google/ads/googleads/v10/services/services/feed_service/client.py | parse_common_location_path | JakobSteixner/google-ads-python | python | @staticmethod
def parse_common_location_path(path: str) -> Dict[(str, str)]:
m = re.match('^projects/(?P<project>.+?)/locations/(?P<location>.+?)$', path)
return (m.groupdict() if m else {}) |
def __init__(self, *, credentials: Optional[ga_credentials.Credentials]=None, transport: Union[(str, FeedServiceTransport, None)]=None, client_options: Optional[client_options_lib.ClientOptions]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO) -> None:
'Instantiates the feed service client.\n\n Args:\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n transport (Union[str, FeedServiceTransport]): The\n transport to use. If set to None, a transport is chosen\n automatically.\n client_options (google.api_core.client_options.ClientOptions): Custom options for the\n client. It won\'t take effect if a ``transport`` instance is provided.\n (1) The ``api_endpoint`` property can be used to override the\n default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT\n environment variable can also be used to override the endpoint:\n "always" (always use the default mTLS endpoint), "never" (always\n use the default regular endpoint) and "auto" (auto switch to the\n default mTLS endpoint if client certificate is present, this is\n the default value). However, the ``api_endpoint`` property takes\n precedence if provided.\n (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable\n is "true", then the ``client_cert_source`` property can be used\n to provide client certificate for mutual TLS transport. If\n not provided, the default SSL client certificate will be used if\n present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not\n set, no client certificate will be used.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you\'re developing\n your own client library.\n\n Raises:\n google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport\n creation failed for any reason.\n '
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if (client_options is None):
client_options = client_options_lib.ClientOptions()
if (os.getenv('GOOGLE_API_USE_CLIENT_CERTIFICATE', 'false') not in ('true', 'false')):
raise ValueError('Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`')
use_client_cert = (os.getenv('GOOGLE_API_USE_CLIENT_CERTIFICATE', 'false') == 'true')
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
if (client_options.api_endpoint is not None):
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv('GOOGLE_API_USE_MTLS_ENDPOINT', 'auto')
if (use_mtls_env == 'never'):
api_endpoint = self.DEFAULT_ENDPOINT
elif (use_mtls_env == 'always'):
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif (use_mtls_env == 'auto'):
api_endpoint = (self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT)
else:
raise MutualTLSChannelError('Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always')
if isinstance(transport, FeedServiceTransport):
if (credentials or client_options.credentials_file):
raise ValueError('When providing a transport instance, provide its credentials directly.')
if client_options.scopes:
raise ValueError('When providing a transport instance, provide its scopes directly.')
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True) | 6,203,537,695,892,571,000 | Instantiates the feed service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, FeedServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason. | google/ads/googleads/v10/services/services/feed_service/client.py | __init__ | JakobSteixner/google-ads-python | python | def __init__(self, *, credentials: Optional[ga_credentials.Credentials]=None, transport: Union[(str, FeedServiceTransport, None)]=None, client_options: Optional[client_options_lib.ClientOptions]=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO) -> None:
'Instantiates the feed service client.\n\n Args:\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n transport (Union[str, FeedServiceTransport]): The\n transport to use. If set to None, a transport is chosen\n automatically.\n client_options (google.api_core.client_options.ClientOptions): Custom options for the\n client. It won\'t take effect if a ``transport`` instance is provided.\n (1) The ``api_endpoint`` property can be used to override the\n default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT\n environment variable can also be used to override the endpoint:\n "always" (always use the default mTLS endpoint), "never" (always\n use the default regular endpoint) and "auto" (auto switch to the\n default mTLS endpoint if client certificate is present, this is\n the default value). However, the ``api_endpoint`` property takes\n precedence if provided.\n (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable\n is "true", then the ``client_cert_source`` property can be used\n to provide client certificate for mutual TLS transport. If\n not provided, the default SSL client certificate will be used if\n present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not\n set, no client certificate will be used.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you\'re developing\n your own client library.\n\n Raises:\n google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport\n creation failed for any reason.\n '
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if (client_options is None):
client_options = client_options_lib.ClientOptions()
if (os.getenv('GOOGLE_API_USE_CLIENT_CERTIFICATE', 'false') not in ('true', 'false')):
raise ValueError('Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`')
use_client_cert = (os.getenv('GOOGLE_API_USE_CLIENT_CERTIFICATE', 'false') == 'true')
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
if (client_options.api_endpoint is not None):
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv('GOOGLE_API_USE_MTLS_ENDPOINT', 'auto')
if (use_mtls_env == 'never'):
api_endpoint = self.DEFAULT_ENDPOINT
elif (use_mtls_env == 'always'):
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif (use_mtls_env == 'auto'):
api_endpoint = (self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT)
else:
raise MutualTLSChannelError('Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always')
if isinstance(transport, FeedServiceTransport):
if (credentials or client_options.credentials_file):
raise ValueError('When providing a transport instance, provide its credentials directly.')
if client_options.scopes:
raise ValueError('When providing a transport instance, provide its scopes directly.')
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True) |
def mutate_feeds(self, request: Union[(feed_service.MutateFeedsRequest, dict)]=None, *, customer_id: str=None, operations: Sequence[feed_service.FeedOperation]=None, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> feed_service.MutateFeedsResponse:
'Creates, updates, or removes feeds. Operation statuses are\n returned.\n\n List of thrown errors: `AuthenticationError <>`__\n `AuthorizationError <>`__ `CollectionSizeError <>`__\n `DatabaseError <>`__ `DistinctError <>`__ `FeedError <>`__\n `FieldError <>`__ `FieldMaskError <>`__ `HeaderError <>`__\n `IdError <>`__ `InternalError <>`__ `ListOperationError <>`__\n `MutateError <>`__ `NewResourceCreationError <>`__\n `NotEmptyError <>`__ `NullError <>`__ `OperatorError <>`__\n `QuotaError <>`__ `RangeError <>`__ `RequestError <>`__\n `ResourceCountLimitExceededError <>`__ `SizeLimitError <>`__\n `StringFormatError <>`__ `StringLengthError <>`__\n\n Args:\n request (Union[google.ads.googleads.v10.services.types.MutateFeedsRequest, dict]):\n The request object. Request message for\n [FeedService.MutateFeeds][google.ads.googleads.v10.services.FeedService.MutateFeeds].\n customer_id (str):\n Required. The ID of the customer\n whose feeds are being modified.\n\n This corresponds to the ``customer_id`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n operations (Sequence[google.ads.googleads.v10.services.types.FeedOperation]):\n Required. The list of operations to\n perform on individual feeds.\n\n This corresponds to the ``operations`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n google.ads.googleads.v10.services.types.MutateFeedsResponse:\n Response message for an feed mutate.\n '
has_flattened_params = any([customer_id, operations])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, feed_service.MutateFeedsRequest)):
request = feed_service.MutateFeedsRequest(request)
if (customer_id is not None):
request.customer_id = customer_id
if (operations is not None):
request.operations = operations
rpc = self._transport._wrapped_methods[self._transport.mutate_feeds]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('customer_id', request.customer_id),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response | -6,531,921,536,767,129,000 | Creates, updates, or removes feeds. Operation statuses are
returned.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `CollectionSizeError <>`__
`DatabaseError <>`__ `DistinctError <>`__ `FeedError <>`__
`FieldError <>`__ `FieldMaskError <>`__ `HeaderError <>`__
`IdError <>`__ `InternalError <>`__ `ListOperationError <>`__
`MutateError <>`__ `NewResourceCreationError <>`__
`NotEmptyError <>`__ `NullError <>`__ `OperatorError <>`__
`QuotaError <>`__ `RangeError <>`__ `RequestError <>`__
`ResourceCountLimitExceededError <>`__ `SizeLimitError <>`__
`StringFormatError <>`__ `StringLengthError <>`__
Args:
request (Union[google.ads.googleads.v10.services.types.MutateFeedsRequest, dict]):
The request object. Request message for
[FeedService.MutateFeeds][google.ads.googleads.v10.services.FeedService.MutateFeeds].
customer_id (str):
Required. The ID of the customer
whose feeds are being modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (Sequence[google.ads.googleads.v10.services.types.FeedOperation]):
Required. The list of operations to
perform on individual feeds.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v10.services.types.MutateFeedsResponse:
Response message for an feed mutate. | google/ads/googleads/v10/services/services/feed_service/client.py | mutate_feeds | JakobSteixner/google-ads-python | python | def mutate_feeds(self, request: Union[(feed_service.MutateFeedsRequest, dict)]=None, *, customer_id: str=None, operations: Sequence[feed_service.FeedOperation]=None, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> feed_service.MutateFeedsResponse:
'Creates, updates, or removes feeds. Operation statuses are\n returned.\n\n List of thrown errors: `AuthenticationError <>`__\n `AuthorizationError <>`__ `CollectionSizeError <>`__\n `DatabaseError <>`__ `DistinctError <>`__ `FeedError <>`__\n `FieldError <>`__ `FieldMaskError <>`__ `HeaderError <>`__\n `IdError <>`__ `InternalError <>`__ `ListOperationError <>`__\n `MutateError <>`__ `NewResourceCreationError <>`__\n `NotEmptyError <>`__ `NullError <>`__ `OperatorError <>`__\n `QuotaError <>`__ `RangeError <>`__ `RequestError <>`__\n `ResourceCountLimitExceededError <>`__ `SizeLimitError <>`__\n `StringFormatError <>`__ `StringLengthError <>`__\n\n Args:\n request (Union[google.ads.googleads.v10.services.types.MutateFeedsRequest, dict]):\n The request object. Request message for\n [FeedService.MutateFeeds][google.ads.googleads.v10.services.FeedService.MutateFeeds].\n customer_id (str):\n Required. The ID of the customer\n whose feeds are being modified.\n\n This corresponds to the ``customer_id`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n operations (Sequence[google.ads.googleads.v10.services.types.FeedOperation]):\n Required. The list of operations to\n perform on individual feeds.\n\n This corresponds to the ``operations`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n google.ads.googleads.v10.services.types.MutateFeedsResponse:\n Response message for an feed mutate.\n '
has_flattened_params = any([customer_id, operations])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, feed_service.MutateFeedsRequest)):
request = feed_service.MutateFeedsRequest(request)
if (customer_id is not None):
request.customer_id = customer_id
if (operations is not None):
request.operations = operations
rpc = self._transport._wrapped_methods[self._transport.mutate_feeds]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('customer_id', request.customer_id),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response |
def get_dataset_example_mnist(path_dataset, name_dataset, using_test_dataset):
"\n read input images (vector), dump into \n '.pickle' format for next load, and return it as a numpy array.\n "
flag_dataloaded = 0
if ((name_dataset != 'mnist_test_example') and (name_dataset != 'mnist_train_example')):
raise Exception('You have provide the wrong dataset name or path, please check carefully')
else:
dataset_path_name = (path_dataset + name_dataset)
if os.path.isfile(('%s.pickle' % dataset_path_name)):
example = pickle.load(open(('%s.pickle' % dataset_path_name)))
flag_dataloaded = 1
else:
flag_datasetsource = (((os.path.isfile((path_dataset + 'train-images.idx3-ubyte')) & os.path.isfile((path_dataset + 'train-labels.idx1-ubyte'))) & os.path.isfile((path_dataset + 't10k-images.idx3-ubyte'))) & os.path.isfile((path_dataset + 't10k-labels.idx1-ubyte')))
if (flag_datasetsource == False):
raise Exception(("You haven't downloaded the dataset into the %s!" % path_dataset))
else:
if using_test_dataset:
image = open((path_dataset + 't10k-images.idx3-ubyte'), 'rb')
else:
image = open((path_dataset + 'train-images.idx3-ubyte'), 'rb')
image.read(4)
num_image = unpack('>I', image.read(4))[0]
height_image = unpack('>I', image.read(4))[0]
length_image = unpack('>I', image.read(4))[0]
example = np.zeros((num_image, height_image, length_image), dtype=np.uint8)
for i in xrange(num_image):
example[i] = [[unpack('>B', image.read(1))[0] for m in xrange(length_image)] for n in xrange(height_image)]
pickle.dump(example, open(('%s.pickle' % dataset_path_name), 'wb'))
flag_dataloaded = 1
if (flag_dataloaded == 0):
raise Exception('Failed to load the required dataset, please check the name_dataset and other printed information!')
else:
return example | 1,705,901,212,704,732,700 | read input images (vector), dump into
'.pickle' format for next load, and return it as a numpy array. | Spike generation/spike_recorder_focal.py | get_dataset_example_mnist | Mary-Shi/Three-SNN-learning-algorithms-in-Brian2 | python | def get_dataset_example_mnist(path_dataset, name_dataset, using_test_dataset):
"\n read input images (vector), dump into \n '.pickle' format for next load, and return it as a numpy array.\n "
flag_dataloaded = 0
if ((name_dataset != 'mnist_test_example') and (name_dataset != 'mnist_train_example')):
raise Exception('You have provided the wrong dataset name or path, please check carefully')
else:
dataset_path_name = (path_dataset + name_dataset)
if os.path.isfile(('%s.pickle' % dataset_path_name)):
example = pickle.load(open(('%s.pickle' % dataset_path_name)))
flag_dataloaded = 1
else:
flag_datasetsource = (((os.path.isfile((path_dataset + 'train-images.idx3-ubyte')) & os.path.isfile((path_dataset + 'train-labels.idx1-ubyte'))) & os.path.isfile((path_dataset + 't10k-images.idx3-ubyte'))) & os.path.isfile((path_dataset + 't10k-labels.idx1-ubyte')))
if (flag_datasetsource == False):
raise Exception(("You haven't downloaded the dataset into the %s!" % path_dataset))
else:
if using_test_dataset:
image = open((path_dataset + 't10k-images.idx3-ubyte'), 'rb')
else:
image = open((path_dataset + 'train-images.idx3-ubyte'), 'rb')
image.read(4)
num_image = unpack('>I', image.read(4))[0]
height_image = unpack('>I', image.read(4))[0]
length_image = unpack('>I', image.read(4))[0]
example = np.zeros((num_image, height_image, length_image), dtype=np.uint8)
for i in xrange(num_image):
example[i] = [[unpack('>B', image.read(1))[0] for m in xrange(length_image)] for n in xrange(height_image)]
pickle.dump(example, open(('%s.pickle' % dataset_path_name), 'wb'))
flag_dataloaded = 1
if (flag_dataloaded == 0):
raise Exception('Failed to load the required dataset, please check the name_dataset and other printed information!')
else:
return example |
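A short sketch of calling the loader above; the directory is an assumption and must already contain the raw MNIST idx files (or a previously pickled copy).
# Hypothetical call: the path is a placeholder.
path_dataset = './mnist_data/'
test_images = get_dataset_example_mnist(path_dataset, 'mnist_test_example',
                                        using_test_dataset=True)
print(test_images.shape)   # (10000, 28, 28) for the standard MNIST test split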
def __init__(self, ids=None, all=None):
'ReopenChatsBulkInputObject - a model defined in Swagger'
self._ids = None
self._all = None
self.discriminator = None
if (ids is not None):
self.ids = ids
if (all is not None):
self.all = all | -6,425,159,068,889,383,000 | ReopenChatsBulkInputObject - a model defined in Swagger | TextMagic/models/reopen_chats_bulk_input_object.py | __init__ | imissyouso/textmagic-rest-python | python | def __init__(self, ids=None, all=None):
self._ids = None
self._all = None
self.discriminator = None
if (ids is not None):
self.ids = ids
if (all is not None):
self.all = all |
@property
def ids(self):
'Gets the ids of this ReopenChatsBulkInputObject. # noqa: E501\n\n Entity ID(s), separated by comma # noqa: E501\n\n :return: The ids of this ReopenChatsBulkInputObject. # noqa: E501\n :rtype: str\n '
return self._ids | 2,342,970,719,951,745,500 | Gets the ids of this ReopenChatsBulkInputObject. # noqa: E501
Entity ID(s), separated by comma # noqa: E501
:return: The ids of this ReopenChatsBulkInputObject. # noqa: E501
:rtype: str | TextMagic/models/reopen_chats_bulk_input_object.py | ids | imissyouso/textmagic-rest-python | python | @property
def ids(self):
'Gets the ids of this ReopenChatsBulkInputObject. # noqa: E501\n\n Entity ID(s), separated by comma # noqa: E501\n\n :return: The ids of this ReopenChatsBulkInputObject. # noqa: E501\n :rtype: str\n '
return self._ids |
@ids.setter
def ids(self, ids):
'Sets the ids of this ReopenChatsBulkInputObject.\n\n Entity ID(s), separated by comma # noqa: E501\n\n :param ids: The ids of this ReopenChatsBulkInputObject. # noqa: E501\n :type: str\n '
self._ids = ids | 2,659,760,497,009,166,300 | Sets the ids of this ReopenChatsBulkInputObject.
Entity ID(s), separated by comma # noqa: E501
:param ids: The ids of this ReopenChatsBulkInputObject. # noqa: E501
:type: str | TextMagic/models/reopen_chats_bulk_input_object.py | ids | imissyouso/textmagic-rest-python | python | @ids.setter
def ids(self, ids):
'Sets the ids of this ReopenChatsBulkInputObject.\n\n Entity ID(s), separated by comma # noqa: E501\n\n :param ids: The ids of this ReopenChatsBulkInputObject. # noqa: E501\n :type: str\n '
self._ids = ids |
@property
def all(self):
'Gets the all of this ReopenChatsBulkInputObject. # noqa: E501\n\n Entity ID(s), separated by comma # noqa: E501\n\n :return: The all of this ReopenChatsBulkInputObject. # noqa: E501\n :rtype: bool\n '
return self._all | -6,900,661,991,682,295,000 | Gets the all of this ReopenChatsBulkInputObject. # noqa: E501
Entity ID(s), separated by comma # noqa: E501
:return: The all of this ReopenChatsBulkInputObject. # noqa: E501
:rtype: bool | TextMagic/models/reopen_chats_bulk_input_object.py | all | imissyouso/textmagic-rest-python | python | @property
def all(self):
'Gets the all of this ReopenChatsBulkInputObject. # noqa: E501\n\n Entity ID(s), separated by comma # noqa: E501\n\n :return: The all of this ReopenChatsBulkInputObject. # noqa: E501\n :rtype: bool\n '
return self._all |
@all.setter
def all(self, all):
'Sets the all of this ReopenChatsBulkInputObject.\n\n Entity ID(s), separated by comma # noqa: E501\n\n :param all: The all of this ReopenChatsBulkInputObject. # noqa: E501\n :type: bool\n '
self._all = all | -2,739,928,420,086,622,700 | Sets the all of this ReopenChatsBulkInputObject.
Entity ID(s), separated by comma # noqa: E501
:param all: The all of this ReopenChatsBulkInputObject. # noqa: E501
:type: bool | TextMagic/models/reopen_chats_bulk_input_object.py | all | imissyouso/textmagic-rest-python | python | @all.setter
def all(self, all):
'Sets the all of this ReopenChatsBulkInputObject.\n\n Entity ID(s), separated by comma # noqa: E501\n\n :param all: The all of this ReopenChatsBulkInputObject. # noqa: E501\n :type: bool\n '
self._all = all |
def to_dict(self):
'Returns the model properties as a dict'
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(ReopenChatsBulkInputObject, dict):
for (key, value) in self.items():
result[key] = value
return result | -7,274,731,641,055,568,000 | Returns the model properties as a dict | TextMagic/models/reopen_chats_bulk_input_object.py | to_dict | imissyouso/textmagic-rest-python | python | def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(ReopenChatsBulkInputObject, dict):
for (key, value) in self.items():
result[key] = value
return result |
def to_str(self):
'Returns the string representation of the model'
return pprint.pformat(self.to_dict()) | 5,849,158,643,760,736,000 | Returns the string representation of the model | TextMagic/models/reopen_chats_bulk_input_object.py | to_str | imissyouso/textmagic-rest-python | python | def to_str(self):
return pprint.pformat(self.to_dict()) |
def __repr__(self):
'For `print` and `pprint`'
return self.to_str() | -8,960,031,694,814,905,000 | For `print` and `pprint` | TextMagic/models/reopen_chats_bulk_input_object.py | __repr__ | imissyouso/textmagic-rest-python | python | def __repr__(self):
return self.to_str() |
def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, ReopenChatsBulkInputObject)):
return False
return (self.__dict__ == other.__dict__) | 2,073,027,835,143,942,100 | Returns true if both objects are equal | TextMagic/models/reopen_chats_bulk_input_object.py | __eq__ | imissyouso/textmagic-rest-python | python | def __eq__(self, other):
if (not isinstance(other, ReopenChatsBulkInputObject)):
return False
return (self.__dict__ == other.__dict__) |
def __ne__(self, other):
'Returns true if both objects are not equal'
return (not (self == other)) | 7,764,124,047,908,058,000 | Returns true if both objects are not equal | TextMagic/models/reopen_chats_bulk_input_object.py | __ne__ | imissyouso/textmagic-rest-python | python | def __ne__(self, other):
return (not (self == other)) |
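A small sketch tying together the generated-model methods above; the constructor arguments are invented and the import path is inferred from the record's path field.
# Assumes the generated TextMagic package is importable as suggested by the path field.
from TextMagic.models.reopen_chats_bulk_input_object import ReopenChatsBulkInputObject

obj = ReopenChatsBulkInputObject(ids='123,456', all=False)
print(obj.to_dict())     # {'ids': '123,456', 'all': False}
print(obj)               # __repr__ delegates to to_str(), which pretty-prints to_dict()
print(obj == ReopenChatsBulkInputObject(ids='123,456', all=False))   # True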
def test_existingfile(self):
'Test the value-type interface for existing files.'
self.assertTrue(isinstance(test_init_file, ExistingFile))
self.assertFalse(isinstance(example_dir, ExistingFile))
self.assertTrue((ExistingFile(test_init_file) == test_init_file))
self.assertRaises(TypeError, ExistingFile, 12)
self.assertRaises(IOError, ExistingFile, 'wargarbl')
self.assertRaises(IOError, ExistingFile, nonexistant_dir) | 8,421,076,213,231,692,000 | Test the value-type interface for existing files. | funkyvalidate/tests/test_interfaces.py | test_existingfile | OaklandPeters/funkyvalidate | python | def test_existingfile(self):
self.assertTrue(isinstance(test_init_file, ExistingFile))
self.assertFalse(isinstance(example_dir, ExistingFile))
self.assertTrue((ExistingFile(test_init_file) == test_init_file))
self.assertRaises(TypeError, ExistingFile, 12)
self.assertRaises(IOError, ExistingFile, 'wargarbl')
self.assertRaises(IOError, ExistingFile, nonexistant_dir) |
def test_also_class(self):
'\n AlsoClass does not meet the interface as a class, but does once instantiated.\n '
self.assertFalse(meets(AlsoClass, MyInterface))
self.assertTrue(meets(also, MyInterface))
self.assertTrue(isinstance(also, MyInterface))
self.assertFalse(issubclass(AlsoClass, MyInterface)) | -6,897,244,198,216,044,000 | AlsoClass does not meet the interface as a class, but does once instantiated. | funkyvalidate/tests/test_interfaces.py | test_also_class | OaklandPeters/funkyvalidate | python | def test_also_class(self):
'\n \n '
self.assertFalse(meets(AlsoClass, MyInterface))
self.assertTrue(meets(also, MyInterface))
self.assertTrue(isinstance(also, MyInterface))
self.assertFalse(issubclass(AlsoClass, MyInterface)) |
def test_yes_class(self):
'Meets interface'
self.assertTrue(meets(YesClass, MyInterface))
self.assertTrue(meets(yes, MyInterface))
self.assertTrue(isinstance(yes, MyInterface))
self.assertTrue(issubclass(YesClass, MyInterface)) | 2,077,394,470,763,701,800 | Meets interface | funkyvalidate/tests/test_interfaces.py | test_yes_class | OaklandPeters/funkyvalidate | python | def test_yes_class(self):
self.assertTrue(meets(YesClass, MyInterface))
self.assertTrue(meets(yes, MyInterface))
self.assertTrue(isinstance(yes, MyInterface))
self.assertTrue(issubclass(YesClass, MyInterface)) |
def test_no_class(self):
'Does not meet interface.'
self.assertFalse(meets(NoClass, MyInterface))
self.assertFalse(meets(no, MyInterface))
self.assertFalse(isinstance(no, MyInterface))
self.assertFalse(issubclass(NoClass, MyInterface)) | -2,814,324,874,312,452,600 | Does not meet interface. | funkyvalidate/tests/test_interfaces.py | test_no_class | OaklandPeters/funkyvalidate | python | def test_no_class(self):
self.assertFalse(meets(NoClass, MyInterface))
self.assertFalse(meets(no, MyInterface))
self.assertFalse(isinstance(no, MyInterface))
self.assertFalse(issubclass(NoClass, MyInterface)) |
def test_weird_class(self):
'Meets interface as class, but not as instance.\n This is strange - not something that would normally ever happen.'
self.assertTrue(meets(WeirdClass, MyInterface))
self.assertFalse(meets(weird, MyInterface))
self.assertFalse(isinstance(weird, MyInterface))
self.assertTrue(issubclass(WeirdClass, MyInterface)) | 2,899,825,016,811,275,000 | Meets interface as class, but not as instance.
This is strange - not something that would normally ever happen. | funkyvalidate/tests/test_interfaces.py | test_weird_class | OaklandPeters/funkyvalidate | python | def test_weird_class(self):
'Meets interface as class, but not as instance.\n This is strange - not something that would normally ever happen.'
self.assertTrue(meets(WeirdClass, MyInterface))
self.assertFalse(meets(weird, MyInterface))
self.assertFalse(isinstance(weird, MyInterface))
self.assertTrue(issubclass(WeirdClass, MyInterface)) |
def test_first_child_class(self):
"First child inherits MyInterface, but does not implement\n it at all - so it can't be implemented."
self.assertFalse(meets(FirstChild, MyInterface))
self.assertFalse(issubclass(FirstChild, MyInterface))
self.assertRaises(TypeError, FirstChild) | -8,241,776,168,538,306,000 | First child inherits MyInterface, but does not implement
it at all - so it can't be instantiated. | funkyvalidate/tests/test_interfaces.py | test_first_child_class | OaklandPeters/funkyvalidate | python | def test_first_child_class(self):
"First child inherits MyInterface, but does not implement\n it at all - so it can't be implemented."
self.assertFalse(meets(FirstChild, MyInterface))
self.assertFalse(issubclass(FirstChild, MyInterface))
self.assertRaises(TypeError, FirstChild) |
def test_second_child_class(self):
'Meets the interface inherited from its parent.'
self.assertTrue(meets(SecondChild, MyInterface))
self.assertTrue(meets(second_child, MyInterface))
self.assertTrue(isinstance(second_child, MyInterface))
self.assertTrue(issubclass(SecondChild, MyInterface)) | 8,394,721,034,577,703,000 | Meets the interface inherited from its parent. | funkyvalidate/tests/test_interfaces.py | test_second_child_class | OaklandPeters/funkyvalidate | python | def test_second_child_class(self):
self.assertTrue(meets(SecondChild, MyInterface))
self.assertTrue(meets(second_child, MyInterface))
self.assertTrue(isinstance(second_child, MyInterface))
self.assertTrue(issubclass(SecondChild, MyInterface)) |
def test_commutative(self):
'\n AlsoClass does not meet the interface as a class, but does once instantiated.\n '
self.assertFalse(meets(CommutativeFirst, MyInterface))
self.assertTrue(meets(CommutativeSecond, MyInterface))
self.assertTrue(meets(commutative, MyInterface))
self.assertTrue(isinstance(commutative, MyInterface))
self.assertFalse(issubclass(CommutativeFirst, MyInterface))
self.assertTrue(issubclass(CommutativeSecond, MyInterface))
self.assertRaises(TypeError, CommutativeFails) | -284,991,011,918,751,420 | AlsoClass does not meet the interface as a class, but does once instantiated. | funkyvalidate/tests/test_interfaces.py | test_commutative | OaklandPeters/funkyvalidate | python | def test_commutative(self):
'\n \n '
self.assertFalse(meets(CommutativeFirst, MyInterface))
self.assertTrue(meets(CommutativeSecond, MyInterface))
self.assertTrue(meets(commutative, MyInterface))
self.assertTrue(isinstance(commutative, MyInterface))
self.assertFalse(issubclass(CommutativeFirst, MyInterface))
self.assertTrue(issubclass(CommutativeSecond, MyInterface))
self.assertRaises(TypeError, CommutativeFails) |
def _check_lb_service_on_router(self, resource, event, trigger, payload=None):
'Prevent removing a router GW or deleting a router used by LB'
router_id = payload.resource_id
context = payload.context
nsx_router_id = nsx_db.get_nsx_router_id(context.session, router_id)
if (not nsx_router_id):
return
nsxlib = self.loadbalancer.core_plugin.nsxlib
service_client = nsxlib.load_balancer.service
lb_service = service_client.get_router_lb_service(nsx_router_id)
if lb_service:
msg = (_('Cannot delete a %s as it still has lb service attachment') % resource)
raise n_exc.BadRequest(resource='lbaas-lb', msg=msg)
core_plugin = self.loadbalancer.core_plugin
router_subnets = core_plugin._load_router_subnet_cidrs_from_db(context.elevated(), router_id)
subnet_ids = [subnet['id'] for subnet in router_subnets]
if (subnet_ids and self._get_lb_ports(context.elevated(), subnet_ids)):
msg = (_('Cannot delete a %s as it is used by a loadbalancer') % resource)
raise n_exc.BadRequest(resource='lbaas-lb', msg=msg) | 4,377,462,731,415,190,500 | Prevent removing a router GW or deleting a router used by LB | vmware_nsx/services/lbaas/nsx_v3/v2/lb_driver_v2.py | _check_lb_service_on_router | yebinama/vmware-nsx | python | def _check_lb_service_on_router(self, resource, event, trigger, payload=None):
router_id = payload.resource_id
context = payload.context
nsx_router_id = nsx_db.get_nsx_router_id(context.session, router_id)
if (not nsx_router_id):
return
nsxlib = self.loadbalancer.core_plugin.nsxlib
service_client = nsxlib.load_balancer.service
lb_service = service_client.get_router_lb_service(nsx_router_id)
if lb_service:
msg = (_('Cannot delete a %s as it still has lb service attachment') % resource)
raise n_exc.BadRequest(resource='lbaas-lb', msg=msg)
core_plugin = self.loadbalancer.core_plugin
router_subnets = core_plugin._load_router_subnet_cidrs_from_db(context.elevated(), router_id)
subnet_ids = [subnet['id'] for subnet in router_subnets]
if (subnet_ids and self._get_lb_ports(context.elevated(), subnet_ids)):
msg = (_('Cannot delete a %s as it is used by a loadbalancer') % resource)
raise n_exc.BadRequest(resource='lbaas-lb', msg=msg) |
@staticmethod
def add_args(parser):
'Add task-specific arguments to the parser.'
parser.add_argument('data', help='colon separated path to data directories list, will be iterated upon during epochs in round-robin manner; however, valid and test data are always in the first directory to avoid the need for repeating them in all directories')
parser.add_argument('-s', '--source-lang', default=None, metavar='SRC', help='source language')
parser.add_argument('-t', '--target-lang', default=None, metavar='TARGET', help='target language')
parser.add_argument('--load-alignments', action='store_true', help='load the binarized alignments')
parser.add_argument('--left-pad-source', default='True', type=str, metavar='BOOL', help='pad the source on the left')
parser.add_argument('--left-pad-target', default='False', type=str, metavar='BOOL', help='pad the target on the left')
parser.add_argument('--max-source-positions', default=1024, type=int, metavar='N', help='max number of tokens in the source sequence')
parser.add_argument('--max-target-positions', default=1024, type=int, metavar='N', help='max number of tokens in the target sequence')
parser.add_argument('--upsample-primary', default=1, type=int, help='amount to upsample primary dataset')
parser.add_argument('--truncate-source', action='store_true', default=False, help='truncate source to max-source-positions')
parser.add_argument('--num-batch-buckets', default=0, type=int, metavar='N', help='if >0, then bucket source and target lengths into N buckets and pad accordingly; this is useful on TPUs to minimize the number of compilations')
parser.add_argument('--eval-bleu', action='store_true', help='evaluation with BLEU scores')
parser.add_argument('--eval-bleu-detok', type=str, default='space', help='detokenize before computing BLEU (e.g., "moses"); required if using --eval-bleu; use "space" to disable detokenization; see fairseq.data.encoders for other options')
parser.add_argument('--eval-bleu-detok-args', type=str, metavar='JSON', help='args for building the tokenizer, if needed')
parser.add_argument('--eval-tokenized-bleu', action='store_true', default=False, help='compute tokenized BLEU instead of sacrebleu')
parser.add_argument('--eval-bleu-remove-bpe', nargs='?', const='@@ ', default=None, help='remove BPE before computing BLEU')
parser.add_argument('--eval-bleu-args', type=str, metavar='JSON', help='generation args for BLEU scoring, e.g., \'{"beam": 4, "lenpen": 0.6}\'')
parser.add_argument('--eval-bleu-print-samples', action='store_true', help='print sample generations during validation') | -838,882,436,355,435,300 | Add task-specific arguments to the parser. | fairseq/tasks/translation.py | add_args | 227514/Supervised-Simultaneous-MT | python | @staticmethod
def add_args(parser):
parser.add_argument('data', help='colon separated path to data directories list, will be iterated upon during epochs in round-robin manner; however, valid and test data are always in the first directory to avoid the need for repeating them in all directories')
parser.add_argument('-s', '--source-lang', default=None, metavar='SRC', help='source language')
parser.add_argument('-t', '--target-lang', default=None, metavar='TARGET', help='target language')
parser.add_argument('--load-alignments', action='store_true', help='load the binarized alignments')
parser.add_argument('--left-pad-source', default='True', type=str, metavar='BOOL', help='pad the source on the left')
parser.add_argument('--left-pad-target', default='False', type=str, metavar='BOOL', help='pad the target on the left')
parser.add_argument('--max-source-positions', default=1024, type=int, metavar='N', help='max number of tokens in the source sequence')
parser.add_argument('--max-target-positions', default=1024, type=int, metavar='N', help='max number of tokens in the target sequence')
parser.add_argument('--upsample-primary', default=1, type=int, help='amount to upsample primary dataset')
parser.add_argument('--truncate-source', action='store_true', default=False, help='truncate source to max-source-positions')
parser.add_argument('--num-batch-buckets', default=0, type=int, metavar='N', help='if >0, then bucket source and target lengths into N buckets and pad accordingly; this is useful on TPUs to minimize the number of compilations')
parser.add_argument('--eval-bleu', action='store_true', help='evaluation with BLEU scores')
parser.add_argument('--eval-bleu-detok', type=str, default='space', help='detokenize before computing BLEU (e.g., "moses"); required if using --eval-bleu; use "space" to disable detokenization; see fairseq.data.encoders for other options')
parser.add_argument('--eval-bleu-detok-args', type=str, metavar='JSON', help='args for building the tokenizer, if needed')
parser.add_argument('--eval-tokenized-bleu', action='store_true', default=False, help='compute tokenized BLEU instead of sacrebleu')
parser.add_argument('--eval-bleu-remove-bpe', nargs='?', const='@@ ', default=None, help='remove BPE before computing BLEU')
parser.add_argument('--eval-bleu-args', type=str, metavar='JSON', help='generation args for BLEU scoring, e.g., \'{"beam": 4, "lenpen": 0.6}\'')
parser.add_argument('--eval-bleu-print-samples', action='store_true', help='print sample generations during validation') |
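A sketch of wiring the arguments above into a bare parser; the enclosing class name (fairseq's TranslationTask) is inferred from the record's path and is an assumption, as are the data path and language codes.
# Hypothetical: assumes the method above belongs to fairseq's TranslationTask.
import argparse
from fairseq.tasks.translation import TranslationTask

parser = argparse.ArgumentParser()
TranslationTask.add_args(parser)
args = parser.parse_args(['data-bin/iwslt14.de-en', '-s', 'de', '-t', 'en'])
print(args.source_lang, args.target_lang, args.data)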
@classmethod
def setup_task(cls, args, **kwargs):
'Setup the task (e.g., load dictionaries).\n\n Args:\n args (argparse.Namespace): parsed command-line arguments\n '
args.left_pad_source = utils.eval_bool(args.left_pad_source)
args.left_pad_target = utils.eval_bool(args.left_pad_target)
paths = utils.split_paths(args.data)
assert (len(paths) > 0)
if ((args.source_lang is None) or (args.target_lang is None)):
(args.source_lang, args.target_lang) = data_utils.infer_language_pair(paths[0])
if ((args.source_lang is None) or (args.target_lang is None)):
raise Exception('Could not infer language pair, please provide it explicitly')
src_dict = cls.load_dictionary(os.path.join(paths[0], 'dict.{}.txt'.format(args.source_lang)))
tgt_dict = cls.load_dictionary(os.path.join(paths[0], 'dict.{}.txt'.format(args.target_lang)))
assert (src_dict.pad() == tgt_dict.pad())
assert (src_dict.eos() == tgt_dict.eos())
assert (src_dict.unk() == tgt_dict.unk())
logger.info('[{}] dictionary: {} types'.format(args.source_lang, len(src_dict)))
logger.info('[{}] dictionary: {} types'.format(args.target_lang, len(tgt_dict)))
return cls(args, src_dict, tgt_dict) | 3,263,692,343,001,087,500 | Setup the task (e.g., load dictionaries).
Args:
args (argparse.Namespace): parsed command-line arguments | fairseq/tasks/translation.py | setup_task | 227514/Supervised-Simultaneous-MT | python | @classmethod
def setup_task(cls, args, **kwargs):
'Setup the task (e.g., load dictionaries).\n\n Args:\n args (argparse.Namespace): parsed command-line arguments\n '
args.left_pad_source = utils.eval_bool(args.left_pad_source)
args.left_pad_target = utils.eval_bool(args.left_pad_target)
paths = utils.split_paths(args.data)
assert (len(paths) > 0)
if ((args.source_lang is None) or (args.target_lang is None)):
(args.source_lang, args.target_lang) = data_utils.infer_language_pair(paths[0])
if ((args.source_lang is None) or (args.target_lang is None)):
raise Exception('Could not infer language pair, please provide it explicitly')
src_dict = cls.load_dictionary(os.path.join(paths[0], 'dict.{}.txt'.format(args.source_lang)))
tgt_dict = cls.load_dictionary(os.path.join(paths[0], 'dict.{}.txt'.format(args.target_lang)))
assert (src_dict.pad() == tgt_dict.pad())
assert (src_dict.eos() == tgt_dict.eos())
assert (src_dict.unk() == tgt_dict.unk())
logger.info('[{}] dictionary: {} types'.format(args.source_lang, len(src_dict)))
logger.info('[{}] dictionary: {} types'.format(args.target_lang, len(tgt_dict)))
return cls(args, src_dict, tgt_dict) |
def load_dataset(self, split, epoch=1, combine=False, **kwargs):
'Load a given dataset split.\n\n Args:\n split (str): name of the split (e.g., train, valid, test)\n '
paths = utils.split_paths(self.args.data)
assert (len(paths) > 0)
if (split != getattr(self.args, 'train_subset', None)):
paths = paths[:1]
data_path = paths[((epoch - 1) % len(paths))]
(src, tgt) = (self.args.source_lang, self.args.target_lang)
self.datasets[split] = load_langpair_dataset(data_path, split, src, self.src_dict, tgt, self.tgt_dict, combine=combine, dataset_impl=self.args.dataset_impl, upsample_primary=self.args.upsample_primary, left_pad_source=self.args.left_pad_source, left_pad_target=self.args.left_pad_target, max_source_positions=self.args.max_source_positions, max_target_positions=self.args.max_target_positions, load_alignments=self.args.load_alignments, truncate_source=self.args.truncate_source, num_buckets=self.args.num_batch_buckets, shuffle=(split != 'test'), pad_to_multiple=self.args.required_seq_len_multiple) | -2,398,614,466,079,708,700 | Load a given dataset split.
Args:
split (str): name of the split (e.g., train, valid, test) | fairseq/tasks/translation.py | load_dataset | 227514/Supervised-Simultaneous-MT | python | def load_dataset(self, split, epoch=1, combine=False, **kwargs):
'Load a given dataset split.\n\n Args:\n split (str): name of the split (e.g., train, valid, test)\n '
paths = utils.split_paths(self.args.data)
assert (len(paths) > 0)
if (split != getattr(self.args, 'train_subset', None)):
paths = paths[:1]
data_path = paths[((epoch - 1) % len(paths))]
(src, tgt) = (self.args.source_lang, self.args.target_lang)
self.datasets[split] = load_langpair_dataset(data_path, split, src, self.src_dict, tgt, self.tgt_dict, combine=combine, dataset_impl=self.args.dataset_impl, upsample_primary=self.args.upsample_primary, left_pad_source=self.args.left_pad_source, left_pad_target=self.args.left_pad_target, max_source_positions=self.args.max_source_positions, max_target_positions=self.args.max_target_positions, load_alignments=self.args.load_alignments, truncate_source=self.args.truncate_source, num_buckets=self.args.num_batch_buckets, shuffle=(split != 'test'), pad_to_multiple=self.args.required_seq_len_multiple) |
def max_positions(self):
'Return the max sentence length allowed by the task.'
return (self.args.max_source_positions, self.args.max_target_positions) | -4,071,174,841,505,560,600 | Return the max sentence length allowed by the task. | fairseq/tasks/translation.py | max_positions | 227514/Supervised-Simultaneous-MT | python | def max_positions(self):
return (self.args.max_source_positions, self.args.max_target_positions) |
@property
def source_dictionary(self):
'Return the source :class:`~fairseq.data.Dictionary`.'
return self.src_dict | -1,949,164,681,595,292,000 | Return the source :class:`~fairseq.data.Dictionary`. | fairseq/tasks/translation.py | source_dictionary | 227514/Supervised-Simultaneous-MT | python | @property
def source_dictionary(self):
return self.src_dict |
@property
def target_dictionary(self):
'Return the target :class:`~fairseq.data.Dictionary`.'
return self.tgt_dict | 6,649,002,282,696,208,000 | Return the target :class:`~fairseq.data.Dictionary`. | fairseq/tasks/translation.py | target_dictionary | 227514/Supervised-Simultaneous-MT | python | @property
def target_dictionary(self):
return self.tgt_dict |
def fix_data(self, string):
'\n fix wrong tabs, spaces and backslashes\n fix @ in email addresses\n '
if (string is None):
return None
string = ' '.join(string.split())
return string.replace('\\', '').replace('|at|', '@').strip() | -2,851,419,339,054,188,000 | fix wrong tabs, spaces and backslashes
fix @ in email addresses | jedeschule/spiders/brandenburg.py | fix_data | MartinGer/jedeschule-scraper | python | def fix_data(self, string):
'\n fix wrong tabs, spaces and backslashes\n fix @ in email addresses\n '
if (string is None):
return None
string = ' '.join(string.split())
return string.replace('\\', '').replace('|at|', '@').strip()
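A tiny, standalone mirror of the cleanup steps above (not an import of the spider class itself); the input strings are invented examples.
def _clean(value):
    # Same steps as fix_data: collapse whitespace, drop backslashes, restore '@'.
    if value is None:
        return None
    value = ' '.join(value.split())
    return value.replace('\\', '').replace('|at|', '@').strip()

print(_clean('  Schul\\Straße   1  '))    # 'SchulStraße 1'
print(_clean('kontakt|at|schule.de'))     # 'kontakt@schule.de'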
def _ParseJSON(self, json_str):
'Parses response JSON.'
xssi_prefix = ")]}'\n"
if json_str.startswith(xssi_prefix):
json_str = json_str[len(xssi_prefix):]
return json.loads(json_str) | -2,425,086,583,284,833,000 | Parses response JSON. | grr/gui/api_regression_http.py | _ParseJSON | nickamon/grr | python | def _ParseJSON(self, json_str):
xssi_prefix = ")]}'\n"
if json_str.startswith(xssi_prefix):
json_str = json_str[len(xssi_prefix):]
return json.loads(json_str) |
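A standalone sketch of the XSSI-prefix stripping performed by the helper above; the payload is invented.
import json

raw = ")]}'\n" + '{"items": [1, 2, 3]}'   # a typical XSSI-protected JSON response body
xssi_prefix = ")]}'\n"
if raw.startswith(xssi_prefix):
    raw = raw[len(xssi_prefix):]
print(json.loads(raw))                     # {'items': [1, 2, 3]}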
def _PrepareV1Request(self, method, args=None):
'Prepares API v1 request for a given method and args.'
args_proto = None
if args:
args_proto = args.AsPrimitiveProto()
request = self.connector.BuildRequest(method, args_proto)
request.url = request.url.replace('/api/v2/', '/api/')
if (args and request.data):
body_proto = args.__class__().AsPrimitiveProto()
json_format.Parse(request.data, body_proto)
body_args = args.__class__()
body_args.ParseFromString(body_proto.SerializeToString())
request.data = json.dumps(api_value_renderers.StripTypeInfo(api_value_renderers.RenderValue(body_args)), cls=http_api.JSONEncoderWithRDFPrimitivesSupport)
prepped_request = request.prepare()
return (request, prepped_request) | 9,050,258,339,682,464,000 | Prepares API v1 request for a given method and args. | grr/gui/api_regression_http.py | _PrepareV1Request | nickamon/grr | python | def _PrepareV1Request(self, method, args=None):
args_proto = None
if args:
args_proto = args.AsPrimitiveProto()
request = self.connector.BuildRequest(method, args_proto)
request.url = request.url.replace('/api/v2/', '/api/')
if (args and request.data):
body_proto = args.__class__().AsPrimitiveProto()
json_format.Parse(request.data, body_proto)
body_args = args.__class__()
body_args.ParseFromString(body_proto.SerializeToString())
request.data = json.dumps(api_value_renderers.StripTypeInfo(api_value_renderers.RenderValue(body_args)), cls=http_api.JSONEncoderWithRDFPrimitivesSupport)
prepped_request = request.prepare()
return (request, prepped_request) |
def _PrepareV2Request(self, method, args=None):
'Prepares API v2 request for a given method and args.'
args_proto = None
if args:
args_proto = args.AsPrimitiveProto()
request = self.connector.BuildRequest(method, args_proto)
prepped_request = request.prepare()
return (request, prepped_request) | 468,907,928,239,583,740 | Prepares API v2 request for a given method and args. | grr/gui/api_regression_http.py | _PrepareV2Request | nickamon/grr | python | def _PrepareV2Request(self, method, args=None):
args_proto = None
if args:
args_proto = args.AsPrimitiveProto()
request = self.connector.BuildRequest(method, args_proto)
prepped_request = request.prepare()
return (request, prepped_request) |
def HandleCheck(self, method_metadata, args=None, replace=None):
'Does regression check for given method, args and a replace function.'
if (not replace):
raise ValueError("replace can't be None")
if (self.__class__.api_version == 1):
(request, prepped_request) = self._PrepareV1Request(method_metadata.name, args=args)
elif (self.__class__.api_version == 2):
(request, prepped_request) = self._PrepareV2Request(method_metadata.name, args=args)
else:
raise ValueError('api_version may be only 1 or 2, not %d', flags.FLAGS.api_version)
session = requests.Session()
response = session.send(prepped_request)
check_result = {'url': replace(prepped_request.path_url), 'method': request.method}
if request.data:
request_payload = self._ParseJSON(replace(request.data))
if request_payload:
check_result['request_payload'] = request_payload
if (method_metadata.result_type == api_call_router.RouterMethodMetadata.BINARY_STREAM_RESULT_TYPE):
check_result['response'] = replace(utils.SmartUnicode(response.content))
else:
check_result['response'] = self._ParseJSON(replace(response.content))
if (self.__class__.api_version == 1):
stripped_response = api_value_renderers.StripTypeInfo(check_result['response'])
if (stripped_response != check_result['response']):
check_result['type_stripped_response'] = stripped_response
return check_result | -4,304,341,262,135,034,400 | Does regression check for given method, args and a replace function. | grr/gui/api_regression_http.py | HandleCheck | nickamon/grr | python | def HandleCheck(self, method_metadata, args=None, replace=None):
if (not replace):
raise ValueError("replace can't be None")
if (self.__class__.api_version == 1):
(request, prepped_request) = self._PrepareV1Request(method_metadata.name, args=args)
elif (self.__class__.api_version == 2):
(request, prepped_request) = self._PrepareV2Request(method_metadata.name, args=args)
else:
raise ValueError('api_version may be only 1 or 2, not %d', flags.FLAGS.api_version)
session = requests.Session()
response = session.send(prepped_request)
check_result = {'url': replace(prepped_request.path_url), 'method': request.method}
if request.data:
request_payload = self._ParseJSON(replace(request.data))
if request_payload:
check_result['request_payload'] = request_payload
if (method_metadata.result_type == api_call_router.RouterMethodMetadata.BINARY_STREAM_RESULT_TYPE):
check_result['response'] = replace(utils.SmartUnicode(response.content))
else:
check_result['response'] = self._ParseJSON(replace(response.content))
if (self.__class__.api_version == 1):
stripped_response = api_value_renderers.StripTypeInfo(check_result['response'])
if (stripped_response != check_result['response']):
check_result['type_stripped_response'] = stripped_response
return check_result |
def build_custom_pipeline():
'Builds augmentation pipelines for custom data.\n If you want to do exoteric augmentations, you can just re-write this function.\n Needs to return a dict with the same structure.\n '
pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=224, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))]), 'T_val': transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))])}
return pipeline | 7,593,910,056,718,802,000 | Builds augmentation pipelines for custom data.
If you want to do exoteric augmentations, you can just re-write this function.
Needs to return a dict with the same structure. | solo/utils/classification_dataloader.py | build_custom_pipeline | fariasfc/solo-learn | python | def build_custom_pipeline():
'Builds augmentation pipelines for custom data.\n If you want to do exoteric augmentations, you can just re-write this function.\n Needs to return a dict with the same structure.\n '
pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=224, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))]), 'T_val': transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))])}
return pipeline |
def prepare_transforms(dataset: str) -> Tuple[(nn.Module, nn.Module)]:
'Prepares pre-defined train and test transformation pipelines for some datasets.\n\n Args:\n dataset (str): dataset name.\n\n Returns:\n Tuple[nn.Module, nn.Module]: training and validation transformation pipelines.\n '
cifar_pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=32, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261))]), 'T_val': transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261))])}
stl_pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=96, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4823, 0.4466), (0.247, 0.243, 0.261))]), 'T_val': transforms.Compose([transforms.Resize((96, 96)), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4823, 0.4466), (0.247, 0.243, 0.261))])}
imagenet_pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=224, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))]), 'T_val': transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))])}
custom_pipeline = build_custom_pipeline()
pipelines = {'cifar10': cifar_pipeline, 'cifar100': cifar_pipeline, 'stl10': stl_pipeline, 'imagenet100': imagenet_pipeline, 'imagenet': imagenet_pipeline, 'custom': custom_pipeline}
assert (dataset in pipelines)
pipeline = pipelines[dataset]
T_train = pipeline['T_train']
T_val = pipeline['T_val']
return (T_train, T_val) | 3,955,566,528,712,637,000 | Prepares pre-defined train and test transformation pipelines for some datasets.
Args:
dataset (str): dataset name.
Returns:
Tuple[nn.Module, nn.Module]: training and validation transformation pipelines. | solo/utils/classification_dataloader.py | prepare_transforms | fariasfc/solo-learn | python | def prepare_transforms(dataset: str) -> Tuple[(nn.Module, nn.Module)]:
'Prepares pre-defined train and test transformation pipelines for some datasets.\n\n Args:\n dataset (str): dataset name.\n\n Returns:\n Tuple[nn.Module, nn.Module]: training and validation transformation pipelines.\n '
cifar_pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=32, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261))]), 'T_val': transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.4914, 0.4822, 0.4465), (0.247, 0.243, 0.261))])}
stl_pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=96, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4823, 0.4466), (0.247, 0.243, 0.261))]), 'T_val': transforms.Compose([transforms.Resize((96, 96)), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4823, 0.4466), (0.247, 0.243, 0.261))])}
imagenet_pipeline = {'T_train': transforms.Compose([transforms.RandomResizedCrop(size=224, scale=(0.08, 1.0)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))]), 'T_val': transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.228, 0.224, 0.225))])}
custom_pipeline = build_custom_pipeline()
pipelines = {'cifar10': cifar_pipeline, 'cifar100': cifar_pipeline, 'stl10': stl_pipeline, 'imagenet100': imagenet_pipeline, 'imagenet': imagenet_pipeline, 'custom': custom_pipeline}
assert (dataset in pipelines)
pipeline = pipelines[dataset]
T_train = pipeline['T_train']
T_val = pipeline['T_val']
return (T_train, T_val) |
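An illustrative call of the helper above; the dataset key must be one of the supported names, and the dummy image is a stand-in for a real CIFAR-10 sample.
from PIL import Image

T_train, T_val = prepare_transforms('cifar10')
img = Image.new('RGB', (32, 32))   # dummy image in place of a CIFAR-10 sample
x = T_train(img)                   # normalized tensor of shape (3, 32, 32)
print(x.shape, x.dtype)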
def prepare_datasets(dataset: str, T_train: Callable, T_val: Callable, data_dir: Optional[Union[(str, Path)]]=None, train_dir: Optional[Union[(str, Path)]]=None, val_dir: Optional[Union[(str, Path)]]=None) -> Tuple[(Dataset, Dataset)]:
'Prepares train and val datasets.\n\n Args:\n dataset (str): dataset name.\n T_train (Callable): pipeline of transformations for training dataset.\n T_val (Callable): pipeline of transformations for validation dataset.\n data_dir Optional[Union[str, Path]]: path where to download/locate the dataset.\n train_dir Optional[Union[str, Path]]: subpath where the training data is located.\n val_dir Optional[Union[str, Path]]: subpath where the validation data is located.\n\n Returns:\n Tuple[Dataset, Dataset]: training dataset and validation dataset.\n '
if (data_dir is None):
sandbox_dir = Path(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
data_dir = (sandbox_dir / 'datasets')
else:
data_dir = Path(data_dir)
if (train_dir is None):
train_dir = Path(f'{dataset}/train')
else:
train_dir = Path(train_dir)
if (val_dir is None):
val_dir = Path(f'{dataset}/val')
else:
val_dir = Path(val_dir)
assert (dataset in ['cifar10', 'cifar100', 'stl10', 'imagenet', 'imagenet100', 'custom'])
if (dataset in ['cifar10', 'cifar100']):
DatasetClass = vars(torchvision.datasets)[dataset.upper()]
train_dataset = DatasetClass((data_dir / train_dir), train=True, download=True, transform=T_train)
val_dataset = DatasetClass((data_dir / val_dir), train=False, download=True, transform=T_val)
elif (dataset == 'stl10'):
train_dataset = STL10((data_dir / train_dir), split='train', download=True, transform=T_train)
val_dataset = STL10((data_dir / val_dir), split='test', download=True, transform=T_val)
elif (dataset in ['imagenet', 'imagenet100', 'custom']):
train_dir = (data_dir / train_dir)
val_dir = (data_dir / val_dir)
train_dataset = ImageFolder(train_dir, T_train)
val_dataset = ImageFolder(val_dir, T_val)
return (train_dataset, val_dataset) | -3,492,522,221,009,874,000 | Prepares train and val datasets.
Args:
dataset (str): dataset name.
T_train (Callable): pipeline of transformations for training dataset.
T_val (Callable): pipeline of transformations for validation dataset.
data_dir Optional[Union[str, Path]]: path where to download/locate the dataset.
train_dir Optional[Union[str, Path]]: subpath where the training data is located.
val_dir Optional[Union[str, Path]]: subpath where the validation data is located.
Returns:
Tuple[Dataset, Dataset]: training dataset and validation dataset. | solo/utils/classification_dataloader.py | prepare_datasets | fariasfc/solo-learn | python | def prepare_datasets(dataset: str, T_train: Callable, T_val: Callable, data_dir: Optional[Union[(str, Path)]]=None, train_dir: Optional[Union[(str, Path)]]=None, val_dir: Optional[Union[(str, Path)]]=None) -> Tuple[(Dataset, Dataset)]:
'Prepares train and val datasets.\n\n Args:\n dataset (str): dataset name.\n T_train (Callable): pipeline of transformations for training dataset.\n T_val (Callable): pipeline of transformations for validation dataset.\n data_dir Optional[Union[str, Path]]: path where to download/locate the dataset.\n train_dir Optional[Union[str, Path]]: subpath where the training data is located.\n val_dir Optional[Union[str, Path]]: subpath where the validation data is located.\n\n Returns:\n Tuple[Dataset, Dataset]: training dataset and validation dataset.\n '
if (data_dir is None):
sandbox_dir = Path(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
data_dir = (sandbox_dir / 'datasets')
else:
data_dir = Path(data_dir)
if (train_dir is None):
train_dir = Path(f'{dataset}/train')
else:
train_dir = Path(train_dir)
if (val_dir is None):
val_dir = Path(f'{dataset}/val')
else:
val_dir = Path(val_dir)
assert (dataset in ['cifar10', 'cifar100', 'stl10', 'imagenet', 'imagenet100', 'custom'])
if (dataset in ['cifar10', 'cifar100']):
DatasetClass = vars(torchvision.datasets)[dataset.upper()]
train_dataset = DatasetClass((data_dir / train_dir), train=True, download=True, transform=T_train)
val_dataset = DatasetClass((data_dir / val_dir), train=False, download=True, transform=T_val)
elif (dataset == 'stl10'):
train_dataset = STL10((data_dir / train_dir), split='train', download=True, transform=T_train)
val_dataset = STL10((data_dir / val_dir), split='test', download=True, transform=T_val)
elif (dataset in ['imagenet', 'imagenet100', 'custom']):
train_dir = (data_dir / train_dir)
val_dir = (data_dir / val_dir)
train_dataset = ImageFolder(train_dir, T_train)
val_dataset = ImageFolder(val_dir, T_val)
return (train_dataset, val_dataset) |
def prepare_dataloaders(train_dataset: Dataset, val_dataset: Dataset, batch_size: int=64, num_workers: int=4) -> Tuple[(DataLoader, DataLoader)]:
'Wraps a train and a validation dataset with a DataLoader.\n\n Args:\n train_dataset (Dataset): object containing training data.\n val_dataset (Dataset): object containing validation data.\n batch_size (int): batch size.\n num_workers (int): number of parallel workers.\n Returns:\n Tuple[DataLoader, DataLoader]: training dataloader and validation dataloader.\n '
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers, pin_memory=True, drop_last=True)
val_loader = DataLoader(val_dataset, batch_size=batch_size, num_workers=num_workers, pin_memory=True, drop_last=False)
return (train_loader, val_loader) | 3,297,220,022,688,808,000 | Wraps a train and a validation dataset with a DataLoader.
Args:
train_dataset (Dataset): object containing training data.
val_dataset (Dataset): object containing validation data.
batch_size (int): batch size.
num_workers (int): number of parallel workers.
Returns:
Tuple[DataLoader, DataLoader]: training dataloader and validation dataloader. | solo/utils/classification_dataloader.py | prepare_dataloaders | fariasfc/solo-learn | python | def prepare_dataloaders(train_dataset: Dataset, val_dataset: Dataset, batch_size: int=64, num_workers: int=4) -> Tuple[(DataLoader, DataLoader)]:
'Wraps a train and a validation dataset with a DataLoader.\n\n Args:\n train_dataset (Dataset): object containing training data.\n val_dataset (Dataset): object containing validation data.\n batch_size (int): batch size.\n num_workers (int): number of parallel workers.\n Returns:\n Tuple[DataLoader, DataLoader]: training dataloader and validation dataloader.\n '
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers, pin_memory=True, drop_last=True)
val_loader = DataLoader(val_dataset, batch_size=batch_size, num_workers=num_workers, pin_memory=True, drop_last=False)
return (train_loader, val_loader) |
def prepare_data(dataset: str, transform: Optional[Callable]=None, data_dir: Optional[Union[(str, Path)]]=None, train_dir: Optional[Union[(str, Path)]]=None, val_dir: Optional[Union[(str, Path)]]=None, batch_size: int=64, num_workers: int=4) -> Tuple[(DataLoader, DataLoader)]:
'Prepares transformations, creates dataset objects and wraps them in dataloaders.\n\n Args:\n dataset (str): dataset name.\n data_dir (Optional[Union[str, Path]], optional): path where to download/locate the dataset.\n Defaults to None.\n train_dir (Optional[Union[str, Path]], optional): subpath where the\n training data is located. Defaults to None.\n val_dir (Optional[Union[str, Path]], optional): subpath where the\n validation data is located. Defaults to None.\n batch_size (int, optional): batch size. Defaults to 64.\n num_workers (int, optional): number of parallel workers. Defaults to 4.\n\n Returns:\n Tuple[DataLoader, DataLoader]: prepared training and validation dataloader;.\n '
if (transform is None):
(T_train, T_val) = prepare_transforms(dataset)
else:
T_train = transform
T_val = transform
(train_dataset, val_dataset) = prepare_datasets(dataset, T_train, T_val, data_dir=data_dir, train_dir=train_dir, val_dir=val_dir)
(train_loader, val_loader) = prepare_dataloaders(train_dataset, val_dataset, batch_size=batch_size, num_workers=num_workers)
return (train_loader, val_loader) | 6,450,852,906,138,120,000 | Prepares transformations, creates dataset objects and wraps them in dataloaders.
Args:
dataset (str): dataset name.
data_dir (Optional[Union[str, Path]], optional): path where to download/locate the dataset.
Defaults to None.
train_dir (Optional[Union[str, Path]], optional): subpath where the
training data is located. Defaults to None.
val_dir (Optional[Union[str, Path]], optional): subpath where the
validation data is located. Defaults to None.
batch_size (int, optional): batch size. Defaults to 64.
num_workers (int, optional): number of parallel workers. Defaults to 4.
Returns:
Tuple[DataLoader, DataLoader]: prepared training and validation dataloader;. | solo/utils/classification_dataloader.py | prepare_data | fariasfc/solo-learn | python | def prepare_data(dataset: str, transform: Optional[Callable]=None, data_dir: Optional[Union[(str, Path)]]=None, train_dir: Optional[Union[(str, Path)]]=None, val_dir: Optional[Union[(str, Path)]]=None, batch_size: int=64, num_workers: int=4) -> Tuple[(DataLoader, DataLoader)]:
'Prepares transformations, creates dataset objects and wraps them in dataloaders.\n\n Args:\n dataset (str): dataset name.\n data_dir (Optional[Union[str, Path]], optional): path where to download/locate the dataset.\n Defaults to None.\n train_dir (Optional[Union[str, Path]], optional): subpath where the\n training data is located. Defaults to None.\n val_dir (Optional[Union[str, Path]], optional): subpath where the\n validation data is located. Defaults to None.\n batch_size (int, optional): batch size. Defaults to 64.\n num_workers (int, optional): number of parallel workers. Defaults to 4.\n\n Returns:\n Tuple[DataLoader, DataLoader]: prepared training and validation dataloader;.\n '
if (transform is None):
(T_train, T_val) = prepare_transforms(dataset)
else:
T_train = transform
T_val = transform
(train_dataset, val_dataset) = prepare_datasets(dataset, T_train, T_val, data_dir=data_dir, train_dir=train_dir, val_dir=val_dir)
(train_loader, val_loader) = prepare_dataloaders(train_dataset, val_dataset, batch_size=batch_size, num_workers=num_workers)
return (train_loader, val_loader) |
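A hedged end-to-end sketch of the helper above; the directory and batch settings are assumptions, and torchvision downloads CIFAR-10 on first use.
# Illustrative values only.
train_loader, val_loader = prepare_data(
    'cifar10',
    data_dir='./datasets',   # assumed download/location directory
    batch_size=128,
    num_workers=4,
)
images, targets = next(iter(train_loader))
print(images.shape, targets.shape)   # e.g. torch.Size([128, 3, 32, 32]), torch.Size([128])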
@bot.slash()
async def counter(ctx: commands.Context):
'Starts a counter for pressing.'
(await ctx.send('Press!', view=EphemeralCounter())) | -4,987,862,953,905,894,000 | Starts a counter for pressing. | examples/views/ephemeral.py | counter | NextChai/discord.py | python | @bot.slash()
async def counter(ctx: commands.Context):
(await ctx.send('Press!', view=EphemeralCounter())) |
def build_fsm_spec_4_state(direction_logic_value):
'Build an FSM spec with 4 states.\n\n The FSM built has 2 inputs, 1 output, and 4 states. It acts like a \n 2-bit counter, where the output goes to high only if the FSM is in the \n final state.\n\n When the direction pin is low, the counter counts up; if it is high, the\n counter counts down.\n\n Parameters\n ----------\n direction_logic_value : int\n The logic value of the direction pin.\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output pattern corresponding to the direction value.\n list\n The state bit0 pattern corresponding to the direction value.\n list\n The state bit1 pattern corresponding to the direction value.\n\n '
(out, rst, direction) = list(pin_dict.keys())[0:3]
fsm_spec_4_state = {'inputs': [('rst', rst), ('direction', direction)], 'outputs': [('test', out)], 'states': ['S0', 'S1', 'S2', 'S3'], 'transitions': [['00', 'S0', 'S1', '0'], ['01', 'S0', 'S3', '0'], ['00', 'S1', 'S2', '0'], ['01', 'S1', 'S0', '0'], ['00', 'S2', 'S3', '0'], ['01', 'S2', 'S1', '0'], ['00', 'S3', 'S0', '1'], ['01', 'S3', 'S2', '1'], ['1-', '*', 'S0', '']]}
if (not direction_logic_value):
output_pattern = [0, 0, 0, 1]
state_bit0_pattern = [0, 1, 0, 1]
state_bit1_pattern = [0, 0, 1, 1]
else:
output_pattern = [0, 1, 0, 0]
state_bit0_pattern = [0, 1, 0, 1]
state_bit1_pattern = [0, 1, 1, 0]
return (fsm_spec_4_state, output_pattern, state_bit0_pattern, state_bit1_pattern) | -1,682,010,598,897,035,800 | Build an FSM spec with 4 states.
The FSM built has 2 inputs, 1 output, and 4 states. It acts like a
2-bit counter, where the output goes to high only if the FSM is in the
final state.
When the direction pin is low, the counter counts up; if it is high, the
counter counts down.
Parameters
----------
direction_logic_value : int
The logic value of the direction pin.
Returns
-------
dict
The FSM spec that can be consumed by the FSM generator.
list
The output pattern corresponding to the direction value.
list
The state bit0 pattern corresponding to the direction value.
list
The state bit1 pattern corresponding to the direction value. | pynq/lib/logictools/tests/test_fsm_generator.py | build_fsm_spec_4_state | AbinMM/PYNQ | python | def build_fsm_spec_4_state(direction_logic_value):
'Build an FSM spec with 4 states.\n\n The FSM built has 2 inputs, 1 output, and 4 states. It acts like a \n 2-bit counter, where the output goes to high only if the FSM is in the \n final state.\n\n When the direction pin is low, the counter counts up; if it is high, the\n counter counts down.\n\n Parameters\n ----------\n direction_logic_value : int\n The logic value of the direction pin.\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output pattern corresponding to the direction value.\n list\n The state bit0 pattern corresponding to the direction value.\n list\n The state bit1 pattern corresponding to the direction value.\n\n '
(out, rst, direction) = list(pin_dict.keys())[0:3]
fsm_spec_4_state = {'inputs': [('rst', rst), ('direction', direction)], 'outputs': [('test', out)], 'states': ['S0', 'S1', 'S2', 'S3'], 'transitions': [['00', 'S0', 'S1', '0'], ['01', 'S0', 'S3', '0'], ['00', 'S1', 'S2', '0'], ['01', 'S1', 'S0', '0'], ['00', 'S2', 'S3', '0'], ['01', 'S2', 'S1', '0'], ['00', 'S3', 'S0', '1'], ['01', 'S3', 'S2', '1'], ['1-', '*', 'S0', '']]}
if (not direction_logic_value):
output_pattern = [0, 0, 0, 1]
state_bit0_pattern = [0, 1, 0, 1]
state_bit1_pattern = [0, 0, 1, 1]
else:
output_pattern = [0, 1, 0, 0]
state_bit0_pattern = [0, 1, 0, 1]
state_bit1_pattern = [0, 1, 1, 0]
return (fsm_spec_4_state, output_pattern, state_bit0_pattern, state_bit1_pattern) |
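A quick check of the counter behaviour documented above (assumes the module-level pin_dict used by the helper is available); the expected patterns come straight from the function body.
spec_up, out_up, bit0_up, bit1_up = build_fsm_spec_4_state(0)
print(out_up)    # [0, 0, 0, 1] -- output high only in the final state
print(bit1_up)   # [0, 0, 1, 1] -- state bit1 while counting up

spec_down, out_down, bit0_down, bit1_down = build_fsm_spec_4_state(1)
print(bit1_down) # [0, 1, 1, 0] -- state bit1 while counting down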
def build_fsm_spec_random(num_states):
'Build an FSM spec with the specified number of states.\n\n The FSM spec exploits only single input and single output. As a side \n product, a list of output patterns are also returned.\n\n Parameters\n ----------\n num_states : int\n The number of states of the FSM.\n\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output patterns associated with this FSM spec.\n\n '
(input_pin, output_pin) = list(pin_dict.keys())[0:2]
if (num_states == 1):
return ({'inputs': [('rst', input_pin)], 'outputs': [('test', output_pin)], 'states': ['S0'], 'transitions': [['1', '*', 'S0', '']]}, None)
else:
fsm_spec_state = {'inputs': [('rst', input_pin)], 'outputs': [('test', output_pin)], 'states': [], 'transitions': [['1', '*', 'S0', '']]}
output_pattern_list = list()
for i in range(num_states):
current_state = 'S{}'.format(i)
next_state = 'S{}'.format(((i + 1) % num_states))
fsm_spec_state['states'] += [current_state]
output_pattern = '{}'.format(randint(0, 1))
transition = ['0', current_state, next_state, output_pattern]
fsm_spec_state['transitions'] += [transition]
output_pattern_list.append(int(output_pattern))
return (fsm_spec_state, output_pattern_list) | -6,833,447,999,151,086,000 | Build an FSM spec with the specified number of states.
The FSM spec exploits only single input and single output. As a side
product, a list of output patterns are also returned.
Parameters
----------
num_states : int
The number of states of the FSM.
Returns
-------
dict
The FSM spec that can be consumed by the FSM generator.
list
The output patterns associated with this FSM spec. | pynq/lib/logictools/tests/test_fsm_generator.py | build_fsm_spec_random | AbinMM/PYNQ | python | def build_fsm_spec_random(num_states):
'Build an FSM spec with the specified number of states.\n\n The FSM spec exploits only single input and single output. As a side \n product, a list of output patterns are also returned.\n\n Parameters\n ----------\n num_states : int\n The number of states of the FSM.\n\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output patterns associated with this FSM spec.\n\n '
(input_pin, output_pin) = list(pin_dict.keys())[0:2]
if (num_states == 1):
return ({'inputs': [('rst', input_pin)], 'outputs': [('test', output_pin)], 'states': ['S0'], 'transitions': [['1', '*', 'S0', '']]}, None)
else:
fsm_spec_state = {'inputs': [('rst', input_pin)], 'outputs': [('test', output_pin)], 'states': [], 'transitions': [['1', '*', 'S0', '']]}
output_pattern_list = list()
for i in range(num_states):
current_state = 'S{}'.format(i)
next_state = 'S{}'.format(((i + 1) % num_states))
fsm_spec_state['states'] += [current_state]
output_pattern = '{}'.format(randint(0, 1))
transition = ['0', current_state, next_state, output_pattern]
fsm_spec_state['transitions'] += [transition]
output_pattern_list.append(int(output_pattern))
return (fsm_spec_state, output_pattern_list) |
def build_fsm_spec_max_in_out():
'Build an FSM spec using a maximum number of inputs and outputs.\n\n The returned FSM spec has a maximum number of inputs and \n outputs. At the same time, the largest available number of \n states will be implemented. For example, on PYNQ-Z1, if \n FSM_MAX_INPUT_BITS = 8, and FSM_MAX_STATE_INPUT_BITS = 13, we will \n implement 2**(13-8)-1 = 31 states. This is the largest number of states \n available for this setup, since there is always 1 dummy state that has\n to be reserved.\n\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output patterns associated with this FSM spec.\n\n '
input_pins = list(pin_dict.keys())[:FSM_MAX_INPUT_BITS]
output_pins = list(pin_dict.keys())[FSM_MAX_INPUT_BITS:interface_width]
fsm_spec_inout = {'inputs': [], 'outputs': [], 'states': [], 'transitions': [[('1' * len(input_pins)), '*', 'S0', '']]}
test_lanes = [[] for _ in range(len(output_pins))]
num_states = ((2 ** (FSM_MAX_STATE_INPUT_BITS - FSM_MAX_INPUT_BITS)) - 1)
for i in range(len(input_pins)):
fsm_spec_inout['inputs'].append(('input{}'.format(i), input_pins[i]))
for i in range(len(output_pins)):
fsm_spec_inout['outputs'].append(('output{}'.format(i), output_pins[i]))
for i in range(num_states):
current_state = 'S{}'.format(i)
next_state = 'S{}'.format(((i + 1) % num_states))
fsm_spec_inout['states'].append(current_state)
output_pattern = ''
for test_lane in test_lanes:
random_1bit = '{}'.format(randint(0, 1))
output_pattern += random_1bit
test_lane += random_1bit
transition = [('0' * len(input_pins)), current_state, next_state, output_pattern]
fsm_spec_inout['transitions'].append(transition)
test_patterns = []
for i in range(len(output_pins)):
temp_string = ''.join(test_lanes[i])
test_patterns.append(np.array(bitstring_to_int(wave_to_bitstring(temp_string))))
return (fsm_spec_inout, test_patterns) | -3,612,341,223,182,315,500 | Build an FSM spec using a maximum number of inputs and outputs.
The returned FSM spec has a maximum number of inputs and
outputs. At the same time, the largest available number of
states will be implemented. For example, on PYNQ-Z1, if
FSM_MAX_INPUT_BITS = 8, and FSM_MAX_STATE_INPUT_BITS = 13, we will
implement 2**(13-8)-1 = 31 states. This is the largest number of states
available for this setup, since there is always 1 dummy state that has
to be reserved.
Returns
-------
dict
The FSM spec that can be consumed by the FSM generator.
list
The output patterns associated with this FSM spec. | pynq/lib/logictools/tests/test_fsm_generator.py | build_fsm_spec_max_in_out | AbinMM/PYNQ | python | def build_fsm_spec_max_in_out():
'Build an FSM spec using a maximum number of inputs and outputs.\n\n The returned FSM spec has a maximum number of inputs and \n outputs. At the same time, the largest available number of \n states will be implemented. For example, on PYNQ-Z1, if \n FSM_MAX_INPUT_BITS = 8, and FSM_MAX_STATE_INPUT_BITS = 13, we will \n implement 2**(13-8)-1 = 31 states. This is the largest number of states \n available for this setup, since there is always 1 dummy state that has\n to be reserved.\n\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output patterns associated with this FSM spec.\n\n '
input_pins = list(pin_dict.keys())[:FSM_MAX_INPUT_BITS]
output_pins = list(pin_dict.keys())[FSM_MAX_INPUT_BITS:interface_width]
fsm_spec_inout = {'inputs': [], 'outputs': [], 'states': [], 'transitions': [[('1' * len(input_pins)), '*', 'S0', '']]}
test_lanes = [[] for _ in range(len(output_pins))]
num_states = ((2 ** (FSM_MAX_STATE_INPUT_BITS - FSM_MAX_INPUT_BITS)) - 1)
for i in range(len(input_pins)):
fsm_spec_inout['inputs'].append(('input{}'.format(i), input_pins[i]))
for i in range(len(output_pins)):
fsm_spec_inout['outputs'].append(('output{}'.format(i), output_pins[i]))
for i in range(num_states):
current_state = 'S{}'.format(i)
next_state = 'S{}'.format(((i + 1) % num_states))
fsm_spec_inout['states'].append(current_state)
output_pattern = ''
for test_lane in test_lanes:
random_1bit = '{}'.format(randint(0, 1))
output_pattern += random_1bit
test_lane += random_1bit
transition = [('0' * len(input_pins)), current_state, next_state, output_pattern]
fsm_spec_inout['transitions'].append(transition)
test_patterns = []
for i in range(len(output_pins)):
temp_string = ''.join(test_lanes[i])
test_patterns.append(np.array(bitstring_to_int(wave_to_bitstring(temp_string))))
return (fsm_spec_inout, test_patterns) |
def build_fsm_spec_free_run():
'Build a spec that results in a free-running FSM.\n\n This will return an FSM spec with no given inputs.\n In this case, the FSM is a free running state machine. \n A maximum number of states are deployed.\n\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output patterns associated with this FSM spec.\n\n '
input_pin = list(pin_dict.keys())[0]
output_pins = list(pin_dict.keys())[1:interface_width]
fsm_spec_inout = {'inputs': [], 'outputs': [], 'states': [], 'transitions': []}
test_lanes = [[] for _ in range(len(output_pins))]
num_states = FSM_MAX_NUM_STATES
fsm_spec_inout['inputs'].append(('input0', input_pin))
for i in range(len(output_pins)):
fsm_spec_inout['outputs'].append(('output{}'.format(i), output_pins[i]))
for i in range(num_states):
current_state = 'S{}'.format(i)
next_state = 'S{}'.format(((i + 1) % num_states))
fsm_spec_inout['states'].append(current_state)
output_pattern = ''
for test_lane in test_lanes:
random_1bit = '{}'.format(randint(0, 1))
output_pattern += random_1bit
test_lane += random_1bit
transition = ['-', current_state, next_state, output_pattern]
fsm_spec_inout['transitions'].append(transition)
test_patterns = []
for i in range(len(output_pins)):
temp_string = ''.join(test_lanes[i])
test_patterns.append(np.array(bitstring_to_int(wave_to_bitstring(temp_string))))
return (fsm_spec_inout, test_patterns) | 6,033,142,832,981,971,000 | Build a spec that results in a free-running FSM.
This will return an FSM spec with no given inputs.
In this case, the FSM is a free running state machine.
A maximum number of states are deployed.
Returns
-------
dict
The FSM spec that can be consumed by the FSM generator.
list
The output patterns associated with this FSM spec. | pynq/lib/logictools/tests/test_fsm_generator.py | build_fsm_spec_free_run | AbinMM/PYNQ | python | def build_fsm_spec_free_run():
'Build a spec that results in a free-running FSM.\n\n This will return an FSM spec with no given inputs.\n In this case, the FSM is a free running state machine. \n A maximum number of states are deployed.\n\n Returns\n -------\n dict\n The FSM spec that can be consumed by the FSM generator.\n list\n The output patterns associated with this FSM spec.\n\n '
input_pin = list(pin_dict.keys())[0]
output_pins = list(pin_dict.keys())[1:interface_width]
fsm_spec_inout = {'inputs': [], 'outputs': [], 'states': [], 'transitions': []}
test_lanes = [[] for _ in range(len(output_pins))]
num_states = FSM_MAX_NUM_STATES
fsm_spec_inout['inputs'].append(('input0', input_pin))
for i in range(len(output_pins)):
fsm_spec_inout['outputs'].append(('output{}'.format(i), output_pins[i]))
for i in range(num_states):
current_state = 'S{}'.format(i)
next_state = 'S{}'.format(((i + 1) % num_states))
fsm_spec_inout['states'].append(current_state)
output_pattern = ''
for test_lane in test_lanes:
random_1bit = '{}'.format(randint(0, 1))
output_pattern += random_1bit
test_lane += random_1bit
transition = ['-', current_state, next_state, output_pattern]
fsm_spec_inout['transitions'].append(transition)
test_patterns = []
for i in range(len(output_pins)):
temp_string = ''.join(test_lanes[i])
test_patterns.append(np.array(bitstring_to_int(wave_to_bitstring(temp_string))))
return (fsm_spec_inout, test_patterns) |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_num_samples():
'Test for the Finite State Machine Generator class.\n\n In this test, the pattern generated by the FSM will be compared with the \n one specified. We will test a minimum number of (FSM period + 1) samples,\n and a maximum number of samples. 10MHz and 100MHz clocks are tested\n for each case.\n\n '
ol.download()
(rst, direction) = list(pin_dict.keys())[1:3]
print('\nConnect {} to GND, and {} to VCC.'.format(rst, direction))
input('Hit enter after done ...')
(fsm_spec_4_state, output_pattern, _, _) = build_fsm_spec_4_state(1)
fsm_period = len(fsm_spec_4_state['states'])
for num_samples in [fsm_period, MAX_NUM_TRACE_SAMPLES]:
test_tile = np.array(output_pattern)
golden_test_array = np.tile(test_tile, ceil((num_samples / 4)))
for fsm_frequency_mhz in [10, 100]:
fsm_generator = FSMGenerator(mb_info)
assert (fsm_generator.status == 'RESET')
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=num_samples)
fsm_generator.setup(fsm_spec_4_state, frequency_mhz=fsm_frequency_mhz)
assert (fsm_generator.status == 'READY')
assert ('bram_data_buf' not in fsm_generator.logictools_controller.buffers), 'bram_data_buf is not freed after use.'
fsm_generator.run()
assert (fsm_generator.status == 'RUNNING')
test_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
assert np.array_equal(test_array, golden_test_array[:num_samples]), 'Data pattern not correct when running at {}MHz.'.format(fsm_frequency_mhz)
fsm_generator.stop()
assert (fsm_generator.status == 'READY')
fsm_generator.reset()
assert (fsm_generator.status == 'RESET')
del fsm_generator | -4,553,244,922,994,970,600 | Test for the Finite State Machine Generator class.
In this test, the pattern generated by the FSM will be compared with the
one specified. We will test a minimum number of (FSM period + 1) samples,
and a maximum number of samples. 10MHz and 100MHz clocks are tested
for each case. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_num_samples | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_num_samples():
'Test for the Finite State Machine Generator class.\n\n In this test, the pattern generated by the FSM will be compared with the \n one specified. We will test a minimum number of (FSM period + 1) samples,\n and a maximum number of samples. 10MHz and 100MHz clocks are tested\n for each case.\n\n '
ol.download()
(rst, direction) = list(pin_dict.keys())[1:3]
print('\nConnect {} to GND, and {} to VCC.'.format(rst, direction))
input('Hit enter after done ...')
(fsm_spec_4_state, output_pattern, _, _) = build_fsm_spec_4_state(1)
fsm_period = len(fsm_spec_4_state['states'])
for num_samples in [fsm_period, MAX_NUM_TRACE_SAMPLES]:
test_tile = np.array(output_pattern)
golden_test_array = np.tile(test_tile, ceil((num_samples / 4)))
for fsm_frequency_mhz in [10, 100]:
fsm_generator = FSMGenerator(mb_info)
assert (fsm_generator.status == 'RESET')
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=num_samples)
fsm_generator.setup(fsm_spec_4_state, frequency_mhz=fsm_frequency_mhz)
assert (fsm_generator.status == 'READY')
assert ('bram_data_buf' not in fsm_generator.logictools_controller.buffers), 'bram_data_buf is not freed after use.'
fsm_generator.run()
assert (fsm_generator.status == 'RUNNING')
test_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
assert np.array_equal(test_array, golden_test_array[:num_samples]), 'Data pattern not correct when running at {}MHz.'.format(fsm_frequency_mhz)
fsm_generator.stop()
assert (fsm_generator.status == 'READY')
fsm_generator.reset()
assert (fsm_generator.status == 'RESET')
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_state_bits():
'Test for the Finite State Machine Generator class.\n\n This test is similar to the first test, but in this test,\n we will test the case when the state bits are also used as outputs.\n\n '
ol.download()
(rst, direction) = list(pin_dict.keys())[1:3]
print('\nConnect both {} and {} to GND.'.format(rst, direction))
input('Hit enter after done ...')
(fsm_spec_4_state, output_pattern, state_bit0_pattern, state_bit1_pattern) = build_fsm_spec_4_state(0)
fsm_period = len(fsm_spec_4_state['states'])
golden_test_array = np.array(output_pattern)
golden_state_bit0_array = np.array(state_bit0_pattern)
golden_state_bit1_array = np.array(state_bit1_pattern)
for fsm_frequency_mhz in [10, 100]:
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=fsm_period)
fsm_generator.setup(fsm_spec_4_state, use_state_bits=True, frequency_mhz=fsm_frequency_mhz)
fsm_generator.run()
test_string = state_bit0_string = state_bit1_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
if (wavelane['name'] == 'state_bit0'):
state_bit0_string = wavelane['wave']
if (wavelane['name'] == 'state_bit1'):
state_bit1_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
state_bit0_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit0_string)))
state_bit1_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit1_string)))
assert np.array_equal(golden_test_array, test_array), 'Data pattern not correct when running at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit0_array, state_bit0_array), 'State bit0 not correct when running at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit1_array, state_bit1_array), 'State bit1 not correct when running at {}MHz.'.format(fsm_frequency_mhz)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator | 6,340,016,362,553,178,000 | Test for the Finite State Machine Generator class.
This test is similar to the first test, but in this test,
we will test the case when the state bits are also used as outputs. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_state_bits | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_state_bits():
'Test for the Finite State Machine Generator class.\n\n This test is similar to the first test, but in this test,\n we will test the case when the state bits are also used as outputs.\n\n '
ol.download()
(rst, direction) = list(pin_dict.keys())[1:3]
print('\nConnect both {} and {} to GND.'.format(rst, direction))
input('Hit enter after done ...')
(fsm_spec_4_state, output_pattern, state_bit0_pattern, state_bit1_pattern) = build_fsm_spec_4_state(0)
fsm_period = len(fsm_spec_4_state['states'])
golden_test_array = np.array(output_pattern)
golden_state_bit0_array = np.array(state_bit0_pattern)
golden_state_bit1_array = np.array(state_bit1_pattern)
for fsm_frequency_mhz in [10, 100]:
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=fsm_period)
fsm_generator.setup(fsm_spec_4_state, use_state_bits=True, frequency_mhz=fsm_frequency_mhz)
fsm_generator.run()
test_string = state_bit0_string = state_bit1_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
if (wavelane['name'] == 'state_bit0'):
state_bit0_string = wavelane['wave']
if (wavelane['name'] == 'state_bit1'):
state_bit1_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
state_bit0_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit0_string)))
state_bit1_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit1_string)))
assert np.array_equal(golden_test_array, test_array), 'Data pattern not correct when running at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit0_array, state_bit0_array), 'State bit0 not correct when running at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit1_array, state_bit1_array), 'State bit1 not correct when running at {}MHz.'.format(fsm_frequency_mhz)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_step():
'Test for the Finite State Machine Generator class.\n\n This test is similar to the above test, but in this test,\n we will test the `step()` method, and ask users to change the input\n logic values in the middle of the test.\n\n '
ol.download()
(rst, direction) = list(pin_dict.keys())[1:3]
print('')
(fsm_spec_4_state, output_pattern_up, state_bit0_pattern_up, state_bit1_pattern_up) = build_fsm_spec_4_state(0)
(_, output_pattern_down, state_bit0_pattern_down, state_bit1_pattern_down) = build_fsm_spec_4_state(1)
output_pattern_down.append(output_pattern_down.pop(0))
state_bit0_pattern_down.append(state_bit0_pattern_down.pop(0))
state_bit1_pattern_down.append(state_bit1_pattern_down.pop(0))
fsm_period = len(fsm_spec_4_state['states'])
golden_test_array = np.array((output_pattern_up + output_pattern_down[1:]))
golden_state_bit0_array = np.array((state_bit0_pattern_up + state_bit0_pattern_down[1:]))
golden_state_bit1_array = np.array((state_bit1_pattern_up + state_bit1_pattern_down[1:]))
for fsm_frequency_mhz in [10, 100]:
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=fsm_period)
fsm_generator.setup(fsm_spec_4_state, use_state_bits=True, frequency_mhz=fsm_frequency_mhz)
print('Connect both {} and {} to GND.'.format(rst, direction))
input('Hit enter after done ...')
for _ in range((len(output_pattern_up) - 1)):
fsm_generator.step()
print('Connect {} to GND, and {} to VCC.'.format(rst, direction))
input('Hit enter after done ...')
for _ in range(len(output_pattern_down)):
fsm_generator.step()
test_string = state_bit0_string = state_bit1_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
if (wavelane['name'] == 'state_bit0'):
state_bit0_string = wavelane['wave']
if (wavelane['name'] == 'state_bit1'):
state_bit1_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
state_bit0_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit0_string)))
state_bit1_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit1_string)))
assert np.array_equal(golden_test_array, test_array), 'Data pattern not correct when stepping at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit0_array, state_bit0_array), 'State bit0 not correct when stepping at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit1_array, state_bit1_array), 'State bit1 not correct when stepping at {}MHz.'.format(fsm_frequency_mhz)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator | 3,564,277,518,620,787,700 | Test for the Finite State Machine Generator class.
This test is similar to the above test, but in this test,
we will test the `step()` method, and ask users to change the input
logic values in the middle of the test. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_step | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_step():
'Test for the Finite State Machine Generator class.\n\n This test is similar to the above test, but in this test,\n we will test the `step()` method, and ask users to change the input\n logic values in the middle of the test.\n\n '
ol.download()
(rst, direction) = list(pin_dict.keys())[1:3]
print()
(fsm_spec_4_state, output_pattern_up, state_bit0_pattern_up, state_bit1_pattern_up) = build_fsm_spec_4_state(0)
(_, output_pattern_down, state_bit0_pattern_down, state_bit1_pattern_down) = build_fsm_spec_4_state(1)
output_pattern_down.append(output_pattern_down.pop(0))
state_bit0_pattern_down.append(state_bit0_pattern_down.pop(0))
state_bit1_pattern_down.append(state_bit1_pattern_down.pop(0))
fsm_period = len(fsm_spec_4_state['states'])
golden_test_array = np.array((output_pattern_up + output_pattern_down[1:]))
golden_state_bit0_array = np.array((state_bit0_pattern_up + state_bit0_pattern_down[1:]))
golden_state_bit1_array = np.array((state_bit1_pattern_up + state_bit1_pattern_down[1:]))
for fsm_frequency_mhz in [10, 100]:
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=fsm_period)
fsm_generator.setup(fsm_spec_4_state, use_state_bits=True, frequency_mhz=fsm_frequency_mhz)
print('Connect both {} and {} to GND.'.format(rst, direction))
input('Hit enter after done ...')
for _ in range((len(output_pattern_up) - 1)):
fsm_generator.step()
print('Connect {} to GND, and {} to VCC.'.format(rst, direction))
input('Hit enter after done ...')
for _ in range(len(output_pattern_down)):
fsm_generator.step()
test_string = state_bit0_string = state_bit1_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
if (wavelane['name'] == 'state_bit0'):
state_bit0_string = wavelane['wave']
if (wavelane['name'] == 'state_bit1'):
state_bit1_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
state_bit0_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit0_string)))
state_bit1_array = np.array(bitstring_to_int(wave_to_bitstring(state_bit1_string)))
assert np.array_equal(golden_test_array, test_array), 'Data pattern not correct when stepping at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit0_array, state_bit0_array), 'State bit0 not correct when stepping at {}MHz.'.format(fsm_frequency_mhz)
assert np.array_equal(golden_state_bit1_array, state_bit1_array), 'State bit1 not correct when stepping at {}MHz.'.format(fsm_frequency_mhz)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_no_trace():
'Test for the Finite State Machine Generator class.\n\n This is similar to the first test, but in this test,\n we will test the case when no analyzer is specified.\n\n '
ol.download()
(fsm_spec_4_state, _, _, _) = build_fsm_spec_4_state(0)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=False)
fsm_generator.setup(fsm_spec_4_state)
fsm_generator.run()
exception_raised = False
try:
fsm_generator.show_waveform()
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception for show_waveform().'
fsm_generator.reset()
del fsm_generator | -8,809,294,722,410,691,000 | Test for the Finite State Machine Generator class.
This is similar to the first test, but in this test,
we will test the case when no analyzer is specified. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_no_trace | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_no_trace():
'Test for the Finite State Machine Generator class.\n\n This is similar to the first test, but in this test,\n we will test the case when no analyzer is specified.\n\n '
ol.download()
(fsm_spec_4_state, _, _, _) = build_fsm_spec_4_state(0)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=False)
fsm_generator.setup(fsm_spec_4_state)
fsm_generator.run()
exception_raised = False
try:
fsm_generator.show_waveform()
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception for show_waveform().'
fsm_generator.reset()
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_num_states1():
'Test for the Finite State Machine Generator class.\n\n The 4th test will check 1 and (MAX_NUM_STATES + 1) states. \n These cases should raise exceptions. For these tests, we use the minimum \n number of input and output pins.\n\n '
ol.download()
fsm_generator = None
exception_raised = False
(fsm_spec_less_than_min_state, _) = build_fsm_spec_random((FSM_MIN_NUM_STATES - 1))
(fsm_spec_more_than_max_state, _) = build_fsm_spec_random((FSM_MAX_NUM_STATES + 1))
for fsm_spec in [fsm_spec_less_than_min_state, fsm_spec_more_than_max_state]:
num_states = len(fsm_spec['states'])
try:
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=MAX_NUM_TRACE_SAMPLES)
fsm_generator.setup(fsm_spec)
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception when there are {} states in the FSM.'.format(num_states)
fsm_generator.reset()
del fsm_generator | -8,275,885,558,516,988,000 | Test for the Finite State Machine Generator class.
The 4th test will check 1 and (MAX_NUM_STATES + 1) states.
These cases should raise exceptions. For these tests, we use the minimum
number of input and output pins. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_num_states1 | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_num_states1():
'Test for the Finite State Machine Generator class.\n\n The 4th test will check 1 and (MAX_NUM_STATES + 1) states. \n These cases should raise exceptions. For these tests, we use the minimum \n number of input and output pins.\n\n '
ol.download()
fsm_generator = None
exception_raised = False
(fsm_spec_less_than_min_state, _) = build_fsm_spec_random((FSM_MIN_NUM_STATES - 1))
(fsm_spec_more_than_max_state, _) = build_fsm_spec_random((FSM_MAX_NUM_STATES + 1))
for fsm_spec in [fsm_spec_less_than_min_state, fsm_spec_more_than_max_state]:
num_states = len(fsm_spec['states'])
try:
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=MAX_NUM_TRACE_SAMPLES)
fsm_generator.setup(fsm_spec)
except ValueError:
exception_raised = True
assert exception_raised, 'Should raise exception when there are {} states in the FSM.'.format(num_states)
fsm_generator.reset()
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_num_states2():
'Test for the Finite State Machine Generator class.\n\n This test will check 2 and MAX_NUM_STATES states. \n These cases should be able to pass random tests. \n For these tests, we use the minimum number of input and output pins.\n\n '
ol.download()
input_pin = list(pin_dict.keys())[0]
print('\nConnect {} to GND, and disconnect other pins.'.format(input_pin))
input('Hit enter after done ...')
for num_states in [2, FSM_MAX_NUM_STATES]:
(fsm_spec, test_pattern) = build_fsm_spec_random(num_states)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=MAX_NUM_TRACE_SAMPLES)
fsm_generator.setup(fsm_spec, frequency_mhz=100)
fsm_generator.run()
test_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
period = num_states
test_tile = np.array(test_pattern)
golden_test_array = np.tile(test_tile, ceil((MAX_NUM_TRACE_SAMPLES / period)))
assert np.array_equal(test_array, golden_test_array[:MAX_NUM_TRACE_SAMPLES]), 'Analysis not matching the generated pattern.'
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator | -5,891,241,888,667,011,000 | Test for the Finite State Machine Generator class.
This test will check 2 and MAX_NUM_STATES states.
These cases should be able to pass random tests.
For these tests, we use the minimum number of input and output pins. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_num_states2 | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_num_states2():
'Test for the Finite State Machine Generator class.\n\n This test will check 2 and MAX_NUM_STATES states. \n These cases should be able to pass random tests. \n For these tests, we use the minimum number of input and output pins.\n\n '
ol.download()
input_pin = list(pin_dict.keys())[0]
print('\nConnect {} to GND, and disconnect other pins.'.format(input_pin))
input('Hit enter after done ...')
for num_states in [2, FSM_MAX_NUM_STATES]:
(fsm_spec, test_pattern) = build_fsm_spec_random(num_states)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=MAX_NUM_TRACE_SAMPLES)
fsm_generator.setup(fsm_spec, frequency_mhz=100)
fsm_generator.run()
test_string = ''
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
if (wavelane['name'] == 'test'):
test_string = wavelane['wave']
test_array = np.array(bitstring_to_int(wave_to_bitstring(test_string)))
period = num_states
test_tile = np.array(test_pattern)
golden_test_array = np.tile(test_tile, ceil((MAX_NUM_TRACE_SAMPLES / period)))
assert np.array_equal(test_array, golden_test_array[:MAX_NUM_TRACE_SAMPLES]), 'Analysis not matching the generated pattern.'
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_max_in_out():
'Test for the Finite State Machine Generator class.\n\n This test will test when maximum number of inputs and \n outputs are used. At the same time, the largest available number of \n states will be implemented.\n\n '
ol.download()
input_pins = list(pin_dict.keys())[:FSM_MAX_INPUT_BITS]
print('\nConnect {} to GND.'.format(input_pins))
print('Disconnect all other pins.')
input('Hit enter after done ...')
(fsm_spec_inout, test_patterns) = build_fsm_spec_max_in_out()
period = ((2 ** (FSM_MAX_STATE_INPUT_BITS - FSM_MAX_INPUT_BITS)) - 1)
num_output_pins = (interface_width - FSM_MAX_INPUT_BITS)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=MAX_NUM_TRACE_SAMPLES)
fsm_generator.setup(fsm_spec_inout, frequency_mhz=100)
fsm_generator.run()
test_strings = ['' for _ in range(num_output_pins)]
test_arrays = [[] for _ in range(num_output_pins)]
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
for j in range(num_output_pins):
if (wavelane['name'] == 'output{}'.format(j)):
test_strings[j] = wavelane['wave']
test_arrays[j] = np.array(bitstring_to_int(wave_to_bitstring(test_strings[j])))
break
golden_arrays = [[] for _ in range(num_output_pins)]
for i in range(num_output_pins):
golden_arrays[i] = np.tile(test_patterns[i], ceil((MAX_NUM_TRACE_SAMPLES / period)))
assert np.array_equal(test_arrays[i], golden_arrays[i][:MAX_NUM_TRACE_SAMPLES]), 'Output{} not matching the generated pattern.'.format(i)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator | 6,678,319,828,326,196,000 | Test for the Finite State Machine Generator class.
This test will test when maximum number of inputs and
outputs are used. At the same time, the largest available number of
states will be implemented. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_max_in_out | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_max_in_out():
'Test for the Finite State Machine Generator class.\n\n This test will test when maximum number of inputs and \n outputs are used. At the same time, the largest available number of \n states will be implemented.\n\n '
ol.download()
input_pins = list(pin_dict.keys())[:FSM_MAX_INPUT_BITS]
print('\nConnect {} to GND.'.format(input_pins))
print('Disconnect all other pins.')
input('Hit enter after done ...')
(fsm_spec_inout, test_patterns) = build_fsm_spec_max_in_out()
period = ((2 ** (FSM_MAX_STATE_INPUT_BITS - FSM_MAX_INPUT_BITS)) - 1)
num_output_pins = (interface_width - FSM_MAX_INPUT_BITS)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=MAX_NUM_TRACE_SAMPLES)
fsm_generator.setup(fsm_spec_inout, frequency_mhz=100)
fsm_generator.run()
test_strings = ['' for _ in range(num_output_pins)]
test_arrays = [[] for _ in range(num_output_pins)]
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
for j in range(num_output_pins):
if (wavelane['name'] == 'output{}'.format(j)):
test_strings[j] = wavelane['wave']
test_arrays[j] = np.array(bitstring_to_int(wave_to_bitstring(test_strings[j])))
break
golden_arrays = [[] for _ in range(num_output_pins)]
for i in range(num_output_pins):
golden_arrays[i] = np.tile(test_patterns[i], ceil((MAX_NUM_TRACE_SAMPLES / period)))
assert np.array_equal(test_arrays[i], golden_arrays[i][:MAX_NUM_TRACE_SAMPLES]), 'Output{} not matching the generated pattern.'.format(i)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator |
@pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_free_run():
"Test for the Finite State Machine Generator class.\n\n This will examine a special scenario where no inputs are given.\n In this case, the FSM is a free running state machine. Since the FSM \n specification requires at least 1 input pin to be specified, 1 pin can \n be used as `don't care` input, while all the other pins are used as \n outputs. A maximum number of states are deployed.\n\n "
ol.download()
print('\nDisconnect all the pins.')
input('Hit enter after done ...')
(fsm_spec_inout, test_patterns) = build_fsm_spec_free_run()
period = FSM_MAX_NUM_STATES
num_output_pins = (interface_width - 1)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=period)
fsm_generator.setup(fsm_spec_inout, frequency_mhz=100)
fsm_generator.run()
test_strings = ['' for _ in range(num_output_pins)]
test_arrays = [[] for _ in range(num_output_pins)]
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
for j in range(num_output_pins):
if (wavelane['name'] == 'output{}'.format(j)):
test_strings[j] = wavelane['wave']
test_arrays[j] = np.array(bitstring_to_int(wave_to_bitstring(test_strings[j])))
break
golden_arrays = test_patterns
for i in range(num_output_pins):
assert np.array_equal(test_arrays[i], golden_arrays[i]), 'Output{} not matching the generated pattern.'.format(i)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator | -6,702,892,957,634,023,000 | Test for the Finite State Machine Generator class.
This will examine a special scenario where no inputs are given.
In this case, the FSM is a free running state machine. Since the FSM
specification requires at least 1 input pin to be specified, 1 pin can
be used as `don't care` input, while all the other pins are used as
outputs. A maximum number of states are deployed. | pynq/lib/logictools/tests/test_fsm_generator.py | test_fsm_free_run | AbinMM/PYNQ | python | @pytest.mark.skipif((not flag), reason='need correct overlay to run')
def test_fsm_free_run():
"Test for the Finite State Machine Generator class.\n\n This will examine a special scenario where no inputs are given.\n In this case, the FSM is a free running state machine. Since the FSM \n specification requires at least 1 input pin to be specified, 1 pin can \n be used as `don't care` input, while all the other pins are used as \n outputs. A maximum number of states are deployed.\n\n "
ol.download()
print('\nDisconnect all the pins.')
input('Hit enter after done ...')
(fsm_spec_inout, test_patterns) = build_fsm_spec_free_run()
period = FSM_MAX_NUM_STATES
num_output_pins = (interface_width - 1)
fsm_generator = FSMGenerator(mb_info)
fsm_generator.trace(use_analyzer=True, num_analyzer_samples=period)
fsm_generator.setup(fsm_spec_inout, frequency_mhz=100)
fsm_generator.run()
test_strings = ['' for _ in range(num_output_pins)]
test_arrays = [[] for _ in range(num_output_pins)]
for wavegroup in fsm_generator.waveform.waveform_dict['signal']:
if (wavegroup and (wavegroup[0] == 'analysis')):
for wavelane in wavegroup[1:]:
for j in range(num_output_pins):
if (wavelane['name'] == 'output{}'.format(j)):
test_strings[j] = wavelane['wave']
test_arrays[j] = np.array(bitstring_to_int(wave_to_bitstring(test_strings[j])))
break
golden_arrays = test_patterns
for i in range(num_output_pins):
assert np.array_equal(test_arrays[i], golden_arrays[i]), 'Output{} not matching the generated pattern.'.format(i)
fsm_generator.stop()
fsm_generator.reset()
del fsm_generator |
def maxProduct(self, words):
'\n :type words: List[str]\n :rtype: int\n '
wordsDict = {}
for word in words:
wordsDict[word] = set(word)
output = 0
for i in range(len(words)):
for j in range((i + 1), len(words)):
if (not (wordsDict[words[i]] & wordsDict[words[j]])):
output = max(output, (len(words[i]) * len(words[j])))
return output | -701,283,042,098,699,100 | :type words: List[str]
:rtype: int | LeetCode/318 Maximum Product of Word Lengths.py | maxProduct | gesuwen/Algorithms | python | def maxProduct(self, words):
'\n :type words: List[str]\n :rtype: int\n '
wordsDict = {}
for word in words:
wordsDict[word] = set(word)
output = 0
for i in range(len(words)):
for j in range((i + 1), len(words)):
if (not (wordsDict[words[i]] & wordsDict[words[j]])):
output = max(output, (len(words[i]) * len(words[j])))
return output |
def setUp(self):
'Set up gateway.'
self.mock_pub = mock.Mock()
self.mock_sub = mock.Mock()
self.gateway = MQTTGateway(self.mock_pub, self.mock_sub) | 2,470,955,238,168,311,000 | Set up gateway. | tests/test_gateway_mqtt.py | setUp | jslove/pymysensors | python | def setUp(self):
self.mock_pub = mock.Mock()
self.mock_sub = mock.Mock()
self.gateway = MQTTGateway(self.mock_pub, self.mock_sub) |
def tearDown(self):
'Stop MQTTGateway if alive.'
if self.gateway.is_alive():
self.gateway.stop() | -5,073,377,274,422,349,000 | Stop MQTTGateway if alive. | tests/test_gateway_mqtt.py | tearDown | jslove/pymysensors | python | def tearDown(self):
if self.gateway.is_alive():
self.gateway.stop() |
def _add_sensor(self, sensorid):
'Add sensor node. Return sensor node instance.'
self.gateway.sensors[sensorid] = Sensor(sensorid)
return self.gateway.sensors[sensorid] | 8,655,351,742,864,993,000 | Add sensor node. Return sensor node instance. | tests/test_gateway_mqtt.py | _add_sensor | jslove/pymysensors | python | def _add_sensor(self, sensorid):
self.gateway.sensors[sensorid] = Sensor(sensorid)
return self.gateway.sensors[sensorid] |
def test_send(self):
'Test send method.'
self.gateway.send('1;1;1;0;1;20\n')
self.mock_pub.assert_called_with('/1/1/1/0/1', '20', 0, True) | 7,165,929,488,064,309,000 | Test send method. | tests/test_gateway_mqtt.py | test_send | jslove/pymysensors | python | def test_send(self):
self.gateway.send('1;1;1;0;1;20\n')
self.mock_pub.assert_called_with('/1/1/1/0/1', '20', 0, True) |
def test_send_empty_string(self):
'Test send method with empty string.'
self.gateway.send('')
self.assertFalse(self.mock_pub.called) | 7,797,458,741,899,858,000 | Test send method with empty string. | tests/test_gateway_mqtt.py | test_send_empty_string | jslove/pymysensors | python | def test_send_empty_string(self):
self.gateway.send('')
self.assertFalse(self.mock_pub.called) |
def test_send_error(self):
'Test send method with error on publish.'
self.mock_pub.side_effect = ValueError('Publish topic cannot contain wildcards.')
with self.assertLogs(level='ERROR') as test_handle:
self.gateway.send('1;1;1;0;1;20\n')
self.mock_pub.assert_called_with('/1/1/1/0/1', '20', 0, True)
self.assertEqual(test_handle.output[0].split('\n', 1)[0], 'ERROR:mysensors.gateway_mqtt:Publish to /1/1/1/0/1 failed: Publish topic cannot contain wildcards.') | -2,151,185,760,036,063,200 | Test send method with error on publish. | tests/test_gateway_mqtt.py | test_send_error | jslove/pymysensors | python | def test_send_error(self):
self.mock_pub.side_effect = ValueError('Publish topic cannot contain wildcards.')
with self.assertLogs(level='ERROR') as test_handle:
self.gateway.send('1;1;1;0;1;20\n')
self.mock_pub.assert_called_with('/1/1/1/0/1', '20', 0, True)
self.assertEqual(test_handle.output[0].split('\n', 1)[0], 'ERROR:mysensors.gateway_mqtt:Publish to /1/1/1/0/1 failed: Publish topic cannot contain wildcards.') |
def test_recv(self):
'Test recv method.'
sensor = self._add_sensor(1)
sensor.children[1] = ChildSensor(1, self.gateway.const.Presentation.S_HUM)
sensor.children[1].values[self.gateway.const.SetReq.V_HUM] = '20'
self.gateway.recv('/1/1/2/0/1', '', 0)
ret = self.gateway.handle_queue()
self.assertEqual(ret, '1;1;1;0;1;20\n')
self.gateway.recv('/1/1/2/0/1', '', 1)
ret = self.gateway.handle_queue()
self.assertEqual(ret, '1;1;1;1;1;20\n') | -1,142,446,973,514,419,600 | Test recv method. | tests/test_gateway_mqtt.py | test_recv | jslove/pymysensors | python | def test_recv(self):
sensor = self._add_sensor(1)
sensor.children[1] = ChildSensor(1, self.gateway.const.Presentation.S_HUM)
sensor.children[1].values[self.gateway.const.SetReq.V_HUM] = '20'
self.gateway.recv('/1/1/2/0/1', '', 0)
ret = self.gateway.handle_queue()
self.assertEqual(ret, '1;1;1;0;1;20\n')
self.gateway.recv('/1/1/2/0/1', , 1)
ret = self.gateway.handle_queue()
self.assertEqual(ret, '1;1;1;1;1;20\n') |
def test_recv_wrong_prefix(self):
'Test recv method with wrong topic prefix.'
sensor = self._add_sensor(1)
sensor.children[1] = ChildSensor(1, self.gateway.const.Presentation.S_HUM)
sensor.children[1].values[self.gateway.const.SetReq.V_HUM] = '20'
self.gateway.recv('wrong/1/1/2/0/1', '', 0)
ret = self.gateway.handle_queue()
self.assertEqual(ret, None) | -589,491,616,878,513,800 | Test recv method with wrong topic prefix. | tests/test_gateway_mqtt.py | test_recv_wrong_prefix | jslove/pymysensors | python | def test_recv_wrong_prefix(self):
sensor = self._add_sensor(1)
sensor.children[1] = ChildSensor(1, self.gateway.const.Presentation.S_HUM)
sensor.children[1].values[self.gateway.const.SetReq.V_HUM] = '20'
self.gateway.recv('wrong/1/1/2/0/1', '', 0)
ret = self.gateway.handle_queue()
self.assertEqual(ret, None) |
def test_presentation(self):
'Test handle presentation message.'
self._add_sensor(1)
self.gateway.logic('1;1;0;0;7;Humidity Sensor\n')
calls = [mock.call('/1/1/1/+/+', self.gateway.recv, 0), mock.call('/1/1/2/+/+', self.gateway.recv, 0), mock.call('/1/+/4/+/+', self.gateway.recv, 0)]
self.mock_sub.assert_has_calls(calls) | 5,016,948,576,630,798,000 | Test handle presentation message. | tests/test_gateway_mqtt.py | test_presentation | jslove/pymysensors | python | def test_presentation(self):
self._add_sensor(1)
self.gateway.logic('1;1;0;0;7;Humidity Sensor\n')
calls = [mock.call('/1/1/1/+/+', self.gateway.recv, 0), mock.call('/1/1/2/+/+', self.gateway.recv, 0), mock.call('/1/+/4/+/+', self.gateway.recv, 0)]
self.mock_sub.assert_has_calls(calls) |
def test_presentation_no_sensor(self):
'Test handle presentation message without sensor.'
self.gateway.logic('1;1;0;0;7;Humidity Sensor\n')
self.assertFalse(self.mock_sub.called) | 8,553,117,162,865,047,000 | Test handle presentation message without sensor. | tests/test_gateway_mqtt.py | test_presentation_no_sensor | jslove/pymysensors | python | def test_presentation_no_sensor(self):
self.gateway.logic('1;1;0;0;7;Humidity Sensor\n')
self.assertFalse(self.mock_sub.called) |
def test_subscribe_error(self):
'Test subscribe throws error.'
self._add_sensor(1)
self.mock_sub.side_effect = ValueError('No topic specified, or incorrect topic type.')
with self.assertLogs(level='ERROR') as test_handle:
self.gateway.logic('1;1;0;0;7;Humidity Sensor\n')
calls = [mock.call('/1/1/1/+/+', self.gateway.recv, 0), mock.call('/1/1/2/+/+', self.gateway.recv, 0)]
self.mock_sub.assert_has_calls(calls)
self.assertEqual(test_handle.output[0].split('\n', 1)[0], 'ERROR:mysensors.gateway_mqtt:Subscribe to /1/1/1/+/+ failed: No topic specified, or incorrect topic type.') | -7,662,311,185,010,614,000 | Test subscribe throws error. | tests/test_gateway_mqtt.py | test_subscribe_error | jslove/pymysensors | python | def test_subscribe_error(self):
self._add_sensor(1)
self.mock_sub.side_effect = ValueError('No topic specified, or incorrect topic type.')
with self.assertLogs(level='ERROR') as test_handle:
self.gateway.logic('1;1;0;0;7;Humidity Sensor\n')
calls = [mock.call('/1/1/1/+/+', self.gateway.recv, 0), mock.call('/1/1/2/+/+', self.gateway.recv, 0)]
self.mock_sub.assert_has_calls(calls)
self.assertEqual(test_handle.output[0].split('\n', 1)[0], 'ERROR:mysensors.gateway_mqtt:Subscribe to /1/1/1/+/+ failed: No topic specified, or incorrect topic type.') |