| message (string, length 13–484) | diff (string, length 38–4.63k) |
|---|---|
argparse: various fixes
add_subparsers uses keyword-only args
required parameter is new in py37 | @@ -135,7 +135,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
def parse_args(self, *, namespace: _N) -> _N: ...
if sys.version_info >= (3, 7):
- def add_subparsers(self, title: str = ...,
+ def add_subparsers(self, *, title: str = ...,
description: Optional[str] = ...,
prog: str = ...,
parser_class: Type[ArgumentParser] = ...,
@@ -146,7 +146,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
help: Optional[str] = ...,
metavar: Optional[str] = ...) -> _SubParsersAction: ...
else:
- def add_subparsers(self, title: Text = ...,
+ def add_subparsers(self, *, title: Text = ...,
description: Optional[Text] = ...,
prog: Text = ...,
parser_class: Type[ArgumentParser] = ...,
@@ -379,6 +379,7 @@ class _SubParsersAction(Action):
_name_parser_map: Dict[_Text, ArgumentParser]
choices: Dict[_Text, ArgumentParser]
_choices_actions: List[Action]
+ if sys.version_info >= (3, 7):
def __init__(self,
option_strings: Sequence[Text],
prog: Text,
@@ -387,6 +388,14 @@ class _SubParsersAction(Action):
required: bool = ...,
help: Optional[Text] = ...,
metavar: Optional[Union[Text, Tuple[Text, ...]]] = ...) -> None: ...
+ else:
+ def __init__(self,
+ option_strings: Sequence[Text],
+ prog: Text,
+ parser_class: Type[ArgumentParser],
+ dest: Text = ...,
+ help: Optional[Text] = ...,
+ metavar: Optional[Union[Text, Tuple[Text, ...]]] = ...) -> None: ...
# TODO: Type keyword args properly.
def add_parser(self, name: Text, **kwargs: Any) -> ArgumentParser: ...
def _get_subactions(self) -> List[Action]: ...
|
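For context on why the stub marks these parameters keyword-only: at runtime `ArgumentParser.add_subparsers` is defined as `def add_subparsers(self, **kwargs)`, so `title`, `description`, `required`, etc. can only be passed by name, and `required` is accepted on Python 3.7+ only. A quick sketch (program and subcommand names are illustrative):

```python
import argparse

parser = argparse.ArgumentParser(prog="demo")

# add_subparsers() takes only **kwargs at runtime, so every option is keyword-only;
# required= is new in Python 3.7.
subparsers = parser.add_subparsers(title="commands", dest="command", required=True)
subparsers.add_parser("run", help="run the thing")

args = parser.parse_args(["run"])
print(args.command)  # -> run
```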
Update opennms-log4j-jndi-rce.yaml
Resolve conflict | @@ -53,4 +53,4 @@ requests:
regex:
- '([a-zA-Z0-9\.\-]+)\.([a-z0-9]+)\.([a-z0-9]+)\.\w+' # Print extracted ${hostName} in output
-# Enhanced by cs on 2022/10/06
+# Enhanced by cs on 2022/10/24
|
Build manylinux2010 wheels instead of older manylinux1
See | @@ -4,8 +4,8 @@ TESTOPTS?=
REPO = git://github.com/cython/cython.git
VERSION?=$(shell sed -ne 's|^__version__\s*=\s*"\([^"]*\)".*|\1|p' Cython/Shadow.py)
-MANYLINUX_IMAGE_X86_64=quay.io/pypa/manylinux1_x86_64
-MANYLINUX_IMAGE_686=quay.io/pypa/manylinux1_i686
+MANYLINUX_IMAGE_X86_64=quay.io/pypa/manylinux2010_x86_64
+MANYLINUX_IMAGE_686=quay.io/pypa/manylinux2010_i686
all: local
|
Remove redundant log option
This option is included in edalize after its update so it's not necessary here | @@ -510,8 +510,6 @@ class NextPnrInterchangeNoSynth(Toolchain):
self.fasm2bels = False
self.tool_options = dict()
- self.nextpnr_log = 'nextpnr.log'
-
def get_share_data(self):
out = subprocess.run(
['find', '.', '-name', self.toolchain_bin], stdout=subprocess.PIPE
@@ -597,9 +595,7 @@ class NextPnrInterchangeNoSynth(Toolchain):
)
self.files.append(get_file_dict(self.device_file, 'device'))
- self.options = [
- '--log', self.nextpnr_log, '--disable-lut-mapping-cache'
- ]
+ self.options = ['--disable-lut-mapping-cache']
self.env_script = os.path.abspath(
'env.sh'
) + ' nextpnr fpga_interchange-' + self.device
|
fix: Resolves ValueError when calling "manim cfg write -l user"
Percent signs added to parser needed to be re-escaped before writing
back to the parser. | @@ -201,11 +201,13 @@ modify write_cfg_subcmd_input to account for it.""",
)
temp = input()
- default[key] = temp
+ default[key] = temp.replace("%", "%%")
default = replace_keys(default) if category == "logger" else default
- parser[category] = dict(default)
+ parser[category] = {
+ i: v.replace("%", "%%") for i, v in dict(default).items()
+ }
else:
action = "open"
|
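For background on the fix: `configparser.ConfigParser` uses `%` for value interpolation, so a bare percent sign in a value is rejected with a `ValueError` at assignment time and has to be escaped as `%%`; the interpolation layer turns it back into a single `%` on read. A minimal sketch, with illustrative section and key names rather than manim's actual config:

```python
import configparser

parser = configparser.ConfigParser()

# A bare "%" trips BasicInterpolation's validation with a ValueError.
try:
    parser["logger"] = {"time_format": "%H:%M"}
except ValueError as exc:
    print("rejected:", exc)

# Doubling the percent signs escapes them; interpolation undoes it on read.
parser["logger"] = {"time_format": "%H:%M".replace("%", "%%")}
print(parser["logger"]["time_format"])  # -> %H:%M
```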
VectorDataWidget : Support Quaternions
Quatf and Quatd | @@ -1038,6 +1038,31 @@ _DataAccessor.registerType( IECore.V3iVectorData.staticTypeId(), _CompoundDataAc
_DataAccessor.registerType( IECore.V3fVectorData.staticTypeId(), _CompoundDataAccessor )
_DataAccessor.registerType( IECore.V3dVectorData.staticTypeId(), _CompoundDataAccessor )
+class _QuatDataAccessor( _CompoundDataAccessor ) :
+
+ def __init__( self, data, heading = "" ) :
+
+ _CompoundDataAccessor.__init__( self, data, heading = heading )
+
+ def numColumns( self ) :
+
+ return 4
+
+ def getElement( self, rowIndex, columnIndex ) :
+
+ v = self.data()[rowIndex]
+ if columnIndex == 0:
+ return GafferUI._Variant.toVariant( v.v()[0] )
+ if columnIndex == 1:
+ return GafferUI._Variant.toVariant( v.v()[1] )
+ if columnIndex == 2:
+ return GafferUI._Variant.toVariant( v.v()[2] )
+ if columnIndex == 3:
+ return GafferUI._Variant.toVariant( v.r() )
+
+_DataAccessor.registerType( IECore.QuatfVectorData.staticTypeId(), _QuatDataAccessor )
+_DataAccessor.registerType( IECore.QuatdVectorData.staticTypeId(), _QuatDataAccessor )
+
class _BoxDataAccessor( _CompoundDataAccessor ) :
def __init__( self, data, heading = "" ) :
@@ -1301,6 +1326,9 @@ _Delegate.registerType( IECore.Box3iVectorData.staticTypeId(), _NumericDelegate
_Delegate.registerType( IECore.Box3fVectorData.staticTypeId(), _NumericDelegate )
_Delegate.registerType( IECore.Box3dVectorData.staticTypeId(), _NumericDelegate )
+_Delegate.registerType( IECore.QuatfVectorData.staticTypeId(), _NumericDelegate )
+_Delegate.registerType( IECore.QuatdVectorData.staticTypeId(), _NumericDelegate )
+
class _BoolDelegate( _Delegate ) :
def __init__( self ) :
|
Remove eta from list of packages installed by fiftyone
This would cause uninstalling fiftyone to uninstall eta, but leave behind its metadata, causing pip to think eta was still installed | @@ -41,8 +41,7 @@ setup(
author_email="[email protected]",
url="https://github.com/voxel51/fiftyone",
license="",
- packages=find_packages() + ["eta"],
- package_dir={"eta": "eta/eta"},
+ packages=find_packages(),
include_package_data=True,
classifiers=[
"Operating System :: MacOS :: MacOS X",
|
Better flexbox sizing for metadata entries
Summary: Fixes (compromise solution)
Test Plan:
Manual
{F34647}
{F34646}
{F34645}
Reviewers: #ft, schrockn, bengotow | @@ -16,8 +16,8 @@ export const MetadataEntries: React.FunctionComponent<{
<tr key={idx} style={{ display: "flex" }}>
<td
style={{
- flex: "0 0 auto",
- width: "max-content"
+ flex: 1,
+ maxWidth: "max-content"
}}
>
{item.label}
@@ -141,4 +141,5 @@ const MetadataEntriesTable = styled.table`
border-right: 1px solid #dbc5ad;
vertical-align: top;
}
+ width: 100%;
`;
|
Update Minnesota.md
Closes
Closes
Closes
Closes | @@ -175,6 +175,68 @@ geolocation: 45.0766155, -93.3004313
* https://twitter.com/BGOnTheScene/status/1382521300065849344
+### Police throw stun grenades at protesters | 2021-04-16
+
+Police behind a chain link fence throw stun grenades over the fence into a crowd of gathered protesters who appear to be standing around chanting. During one instance, a protester's umbrella catches fire.
+
+tags: protester, stun-grenade, less-lethal, property-destruction
+
+id: mn-brooklyncenter-9
+
+geolocation: 45.0766155,-93.3004313
+
+**Links**
+
+* https://twitter.com/NickAtNews/status/1383248483918893064
+* https://twitter.com/JaylaWhitfield/status/1383252826680225793
+* https://twitter.com/NickAtNews/status/1383252959346130945
+
+
+### Police pepper spray crowd of protesters | 2021-04-16
+
+Police and protesters face off on opposite sides of a barricade. Police discharge pepper spray in a wide berth, targeting protesters who step towards them in particular. It is unclear what instigated this incident, although one protester does appear to be trying to recover a red umbrella on the ground.
+
+tags: protester, spray, pepper-spray, less-lethal
+
+id: mn-brooklyncenter-10
+
+geolocation: 45.0766155,-93.3004313
+
+**Links**
+
+* https://twitter.com/NickAtNews/status/1383249958371921925
+
+
+### Police shoot man with impact munition | 2021-04-16
+
+Police and protesters face off on opposite sides of a barricade. A man stands on the grass several feet from police. Police ordered him to step further from the barricade. He did not comply. Police then shot him in the leg with an impact munition.
+
+tags: protester, shoot, projectile, less-lethal
+
+id: mn-brooklyncenter-11
+
+geolocation: 45.0766155,-93.3004313
+
+**Links**
+
+* https://twitter.com/NickAtNews/status/1383250985905115138
+
+
+### Police pepper spray journalists | 2021-04-16
+
+A still image from Brooklyn Center, Minnesota shows police pepper spraying clearly marked journalists, one of whom is holding a camera.
+
+tags: journalist, spray, pepper-spray, less-lethal
+
+id: mn-brooklyncenter-12
+
+geolocation: 45.0766155,-93.3004313
+
+**Links**
+
+* https://twitter.com/AlexKentTN/status/1383290508181590018
+
+
## Minneapolis
### Police hit press in neck and head with wooden baton | 2020-05-26
|
Update local setup docs
Remove dependency on python-venv and python-pip
update how to run test | Make sure you have these things installed on your system:
* Git
-* Python 3.9.x
- * python3-venv \(to setup virtual enviroment\)
- * python3-pip \(to install python packages\)
+* Python 3.10.x
* PostgreSQL 12.x
- * libpq-dev \(on Linux at least\)
+ * libpq-dev (on Linux at least)
* Apache or Nginx
* Node 16.x
@@ -76,7 +74,7 @@ All the needed python packages for production are listed in the `requirements.tx
For a development environment you then run:
```text
-$ pip install -r requirements-dev.txt
+$ python -m pip install -r requirements-dev.txt
```
If any `requirements*.txt` file have been updated you will need to rerun this command to get the updated/added packages.
@@ -292,12 +290,6 @@ To build all assets for development use this command.
$ npm run dev:build
```
-If you are working on the React components you also need to set "API\_BASE\_URL" to the correct value.
-
-```text
-$ export API_BASE_URL='http://apply.hypha.test/api'
-```
-
To build the assets which get deployed, use the following. The deployment scripts will handle this, and the files should not be committed.
```text
@@ -366,18 +358,22 @@ Now you should be able to access the sites on [http://hypha.test/](http://hypha.
### Run tests
-Hypha has specific settings for testing so specify them when you run the "test" command.
+Hypha uses [py.test](https://pytest-django.readthedocs.io/en/latest/) test runner and use `hypha/settings/testing.py` for test settings.
+
+Run the test with:
```text
-$ python manage.py test --settings=hypha.settings.test
+$ pytest
```
If you need to rerun the tests several times this will speed them up considerably.
```text
-$ python manage.py test --parallel --keepdb --settings=hypha.settings.test
+$ pytest --reuse-db
```
+Alernatively, you can run `make test` command that will check for linting, run pytest with --reuse-db and show coverage report.
+
### Administration
* The Apply dashboard: [http://apply.hypha.test/dashboard/](http://apply.hypha.test/dashboard/)
|
Arithmetic: for symbol inputs, make sure the operation is concatenation
TN: minor | @@ -3240,6 +3240,7 @@ class Arithmetic(AbstractExpression):
r = construct(self.r)
if l.type == Symbol and r.type == Symbol:
+ assert self.op == '&'
return BasicExpr(
'Find (Self.Unit.TDH.Symbols, ({}.all & {}.all))',
Symbol,
|
support the lnurl fallback scheme.
completing | @@ -329,9 +329,10 @@ new Vue({
if (this.parse.data.request.startsWith('lightning:')) {
this.parse.data.request = this.parse.data.request.slice(10)
- }
- if (this.parse.data.request.startsWith('lnurl:')) {
+ } else if (this.parse.data.request.startsWith('lnurl:')) {
this.parse.data.request = this.parse.data.request.slice(6)
+ } else if (this.parse.data.request.indexOf('lightning=lnurl1') !== -1) {
+ this.parse.data.request = this.parse.data.request.split('lightning=')[1]
}
if (this.parse.data.request.toLowerCase().startsWith('lnurl1')) {
|
Updated contributing.rst
Rectified grammatical errors | @@ -6,7 +6,7 @@ Contributing to Scrapy
.. important::
- Double check you are reading the most recent version of this document at
+ Double check that you are reading the most recent version of this document at
https://doc.scrapy.org/en/master/contributing.html
There are many ways to contribute to Scrapy. Here are some of them:
@@ -18,7 +18,7 @@ There are many ways to contribute to Scrapy. Here are some of them:
* Report bugs and request features in the `issue tracker`_, trying to follow
the guidelines detailed in `Reporting bugs`_ below.
-* Submit patches for new functionality and/or bug fixes. Please read
+* Submit patches for new functionalities and/or bug fixes. Please read
:ref:`writing-patches` and `Submitting patches`_ below for details on how to
write and submit a patch.
@@ -80,8 +80,8 @@ guidelines when reporting a new bug.
Writing patches
===============
-The better written a patch is, the higher chance that it'll get accepted and
-the sooner that will be merged.
+The better written a patch is, higher is the chance that it'll get accepted and
+sooner it will be merged.
Well-written patches should:
|
Add etcd_volume_size parameter in coreos template
Without those fixes new cluster fails with message:
ERROR: The Parameter (etcd_volume_size) was not defined in template.
Task:
Story: 20337 | @@ -294,6 +294,12 @@ parameters:
domain name for cluster DNS
default: "cluster.local"
+ etcd_volume_size:
+ type: number
+ description: >
+ size of the cinder volume for etcd storage
+ default: 0
+
openstack_ca:
type: string
hidden: true
|
test_runner: Fix incorrect type for enforce_timely_test_completion.
Our TestResult objects are always actually TextTestResults. | @@ -96,7 +96,7 @@ def report_slow_tests() -> None:
print(' This may no longer be true: %s' % (slowness_reason,))
def enforce_timely_test_completion(test_method: Any, test_name: str,
- delay: float, result: TestResult) -> None:
+ delay: float, result: "TextTestResult") -> None:
if hasattr(test_method, 'slowness_reason'):
max_delay = 2.0 # seconds
else:
|
Load tags in docker-run-dev
Fix | @@ -20,6 +20,7 @@ run-uvicorn: ## Runs uvicorn (ASGI) server in managed mode
docker-run-dev: ## Runs dev server in docker
python ./utils/wait_for_postgres.py
python manage.py migrate
+ python manage.py update_tags
python manage.py runserver 0.0.0.0:8000
docker-run-production: ## Runs production server in docker
|
Fix exception.
time_string -> time_data | @@ -78,11 +78,11 @@ def _format_24h(h: int, m: int, s: int) -> str:
return text
-def thai_time(time_string: Union[time, datetime, str], fmt: str = "24h") -> str:
+def thai_time(time_data: Union[time, datetime, str], fmt: str = "24h") -> str:
"""
Convert time to Thai words.
- :param str time_string: time input, can be a datetime.time object \
+ :param str time_data: time input, can be a datetime.time object \
or a datetime.datetime object \
or a string (in H:M or H:M:S format, using 24-hour clock)
:param str fmt: time output format
@@ -112,29 +112,28 @@ def thai_time(time_string: Union[time, datetime, str], fmt: str = "24h") -> str:
"""
_time = None
- if isinstance(time_string, time) or isinstance(time_string, datetime):
- _time = time_string
+ if isinstance(time_data, time) or isinstance(time_data, datetime):
+ _time = time_data
else:
- if not isinstance(time_string, str):
+ if not isinstance(time_data, str):
raise TypeError(
- "Input must be a datetime.time object, a datetime.datetime object, or a string."
+ "Time data must be a datetime.time object, a datetime.datetime object, or a string."
)
- if not time_string:
- raise ValueError("Input string cannot be empty.")
+ if not time_data:
+ raise ValueError("Time string cannot be empty.")
try:
- _time = datetime.strptime(time_string, _TIME_FORMAT_WITH_SEC)
- if not _time:
- _time = datetime.strptime(
- time_string, _TIME_FORMAT_WITHOUT_SEC
- )
+ _time = datetime.strptime(time_data, _TIME_FORMAT_WITH_SEC)
+ except ValueError:
+ try:
+ _time = datetime.strptime(time_data, _TIME_FORMAT_WITHOUT_SEC)
except ValueError:
pass
if not _time:
raise ValueError(
- "Input string must be in either H:M or H:M:S format."
+ f"Time string '{time_data}' does not match H:M or H:M:S format."
)
format_func = None
|
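The control-flow change matters because `datetime.strptime` raises `ValueError` on a mismatch rather than returning `None`, so the fallback format has to live in an `except` block instead of an `if not _time` check. A standalone sketch of the pattern (not PyThaiNLP's actual helper):

```python
from datetime import datetime

def parse_time(text: str) -> datetime:
    # Try the more specific format first; fall back on ValueError, not on a None check.
    for fmt in ("%H:%M:%S", "%H:%M"):
        try:
            return datetime.strptime(text, fmt)
        except ValueError:
            continue
    raise ValueError(f"Time string '{text}' does not match H:M or H:M:S format.")

print(parse_time("13:30:05").time())  # 13:30:05
print(parse_time("13:30").time())     # 13:30:00
```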
cirrus ci: change cache key for pip dependencies
the cache should be considered stale if requirements.txt changes | @@ -30,7 +30,7 @@ task:
ELECTRUM_PYTHON_NAME: pypy3
pip_cache:
folder: ~/.cache/pip
- fingerprint_script: echo $ELECTRUM_IMAGE && cat $ELECTRUM_REQUIREMENTS
+ fingerprint_script: echo $ELECTRUM_IMAGE && cat $ELECTRUM_REQUIREMENTS_CI && cat $ELECTRUM_REQUIREMENTS
populate_script: mkdir -p ~/.cache/pip
electrum_cache:
folder: /tmp/electrum-build
@@ -42,13 +42,14 @@ task:
install_script:
- apt-get update
- apt-get -y install libsecp256k1-0
- - pip install -r $ELECTRUM_REQUIREMENTS
+ - pip install -r $ELECTRUM_REQUIREMENTS_CI
tox_script:
- tox
coveralls_script:
- if [ ! -z "$COVERALLS_REPO_TOKEN" ] ; then coveralls ; fi
env:
- ELECTRUM_REQUIREMENTS: contrib/requirements/requirements-travis.txt
+ ELECTRUM_REQUIREMENTS_CI: contrib/requirements/requirements-travis.txt
+ ELECTRUM_REQUIREMENTS: contrib/requirements/requirements.txt
# following CI_* env vars are set up for coveralls
CI_NAME: "CirrusCI"
CI_BUILD_NUMBER: $CIRRUS_BUILD_ID
@@ -66,7 +67,7 @@ task:
memory: 1G
pip_cache:
folder: ~/.cache/pip
- fingerprint_script: echo Locale && echo $ELECTRUM_IMAGE && cat $ELECTRUM_REQUIREMENTS
+ fingerprint_script: echo Locale && echo $ELECTRUM_IMAGE && cat $ELECTRUM_REQUIREMENTS_CI
populate_script: mkdir -p ~/.cache/pip
electrum_cache:
folder: /tmp/electrum-build
@@ -74,13 +75,13 @@ task:
install_script:
- apt-get update
- apt-get -y install libsecp256k1-0 gettext
- - pip install -r $ELECTRUM_REQUIREMENTS
+ - pip install -r $ELECTRUM_REQUIREMENTS_CI
- pip install requests
locale_script:
- contrib/push_locale
env:
ELECTRUM_IMAGE: python:3.7
- ELECTRUM_REQUIREMENTS: contrib/requirements/requirements-travis.txt
+ ELECTRUM_REQUIREMENTS_CI: contrib/requirements/requirements-travis.txt
# in addition, crowdin_api_key is set as an "override" in https://cirrus-ci.com/settings/...
depends_on:
- Tox Python 3.9
@@ -124,7 +125,7 @@ task:
- sleep 10s
- python3 -m unittest electrum/tests/regtest.py
env:
- ELECTRUM_REQUIREMENTS: contrib/requirements/requirements-travis.txt
+ ELECTRUM_REQUIREMENTS: contrib/requirements/requirements.txt
# ElectrumX exits with an error without this:
ALLOW_ROOT: 1
@@ -146,7 +147,7 @@ task:
- flake8 . --count --select=$ELECTRUM_LINTERS --show-source --statistics
env:
ELECTRUM_IMAGE: python:3.7
- ELECTRUM_REQUIREMENTS: contrib/requirements/requirements-travis.txt
+ ELECTRUM_REQUIREMENTS: contrib/requirements/requirements.txt
matrix:
- name: Flake8 Mandatory
env:
|
core: fix profiling
* SIGTERM safety net prevents profiler from writing results, so disable
it when profiling is active.
* fix warning corrupting stream when profiling=True | @@ -43,8 +43,13 @@ import sys
import threading
import time
import traceback
+import warnings
import zlib
+# TODO: usage of 'import' after setting __name__, but before fixing up
+# sys.modules generates a warning. This happens when profiling = True.
+warnings.filterwarnings('ignore',
+ "Parent module 'mitogen' not found while handling absolute import")
LOG = logging.getLogger('mitogen')
IOLOG = logging.getLogger('mitogen.io')
@@ -1129,6 +1134,7 @@ class ExternalContext(object):
self.channel.close()
def _on_broker_exit(self):
+ if not self.profiling:
os.kill(os.getpid(), signal.SIGTERM)
def _on_shutdown_msg(self, msg):
@@ -1139,6 +1145,7 @@ class ExternalContext(object):
self.broker.shutdown()
def _setup_master(self, profiling, parent_id, context_id, in_fd, out_fd):
+ self.profiling = profiling
if profiling:
enable_profiling()
self.broker = Broker()
|
emoji.js: Add `active_realm_emojis` dict.
This dict will hold all the realm emojis which have not been deactivated. | @@ -4,6 +4,7 @@ var exports = {};
exports.emojis = [];
exports.realm_emojis = {};
+exports.active_realm_emojis = {};
exports.emojis_by_name = {};
exports.emojis_name_to_css_class = {};
exports.emojis_by_unicode = {};
@@ -15,6 +16,7 @@ var zulip_emoji = {
emoji_name: 'zulip',
emoji_url: '/static/generated/emoji/images/emoji/unicode/zulip.png',
is_realm_emoji: true,
+ deactivated: false,
};
_.each(emoji_codes.names, function (value) {
@@ -33,17 +35,23 @@ _.each(emoji_codes.codepoints, function (value) {
exports.update_emojis = function update_emojis(realm_emojis) {
// exports.realm_emojis is emptied before adding the realm-specific emoji to it.
// This makes sure that in case of deletion, the deleted realm_emojis don't
- // persist in exports.realm_emojis.
+ // persist in exports.realm_emojis or exports.active_realm_emojis.
exports.realm_emojis = {};
+ exports.active_realm_emojis = {};
+
// Copy the default emoji list and add realm-specific emoji to it
exports.emojis = default_emojis.slice(0);
_.each(realm_emojis, function (data, name) {
exports.emojis.push({emoji_name: name, emoji_url: data.source_url, is_realm_emoji: true});
exports.realm_emojis[name] = {emoji_name: name, emoji_url: data.source_url};
+ if (data.deactivated !== true) {
+ exports.active_realm_emojis[name] = {emoji_name: name, emoji_url: data.source_url};
+ }
});
// Add the Zulip emoji to the realm emojis list
exports.emojis.push(zulip_emoji);
exports.realm_emojis.zulip = zulip_emoji;
+ exports.active_realm_emojis.zulip = zulip_emoji;
exports.emojis_by_name = {};
exports.emojis_name_to_css_class = {};
|
Update README.md
Updated for Boltcard NFC Card Creator v0.1.1 | @@ -21,10 +21,7 @@ The key #00, K0 (also know as auth key) is skipped to be used as authentificatio
***Always backup all keys that you're trying to write on the card. Without them you may not be able to change them in the future!***
## Setting the card - Boltcard NFC Card Creator (easy way)
-
-- Read the card with the app. Note UID so you can fill it in the extension later.
-- Write the link on the card. It shoud be like `YOUR_LNBITS_DOMAIN/boltcards/api/v1/scan/{external_id}`
- - `{external_id}` should be replaced with the External ID found in the LNBits dialog.
+Updated for v0.1.1
- Add new card in the extension.
- Set a max sats per transaction. Any transaction greater than this amount will be rejected.
@@ -32,14 +29,16 @@ The key #00, K0 (also know as auth key) is skipped to be used as authentificatio
- Set a card name. This is just for your reference inside LNBits.
- Set the card UID. This is the unique identifier on your NFC card and is 7 bytes.
- If on an Android device with a newish version of Chrome, you can click the icon next to the input and tap your card to autofill this field.
+ - Otherwise read it with the Android app (Advanced -> Read NFC) and paste it to the field.
- Advanced Options
- Card Keys (k0, k1, k2) will be automatically generated if not explicitly set.
- - Set to 16 bytes of 0s (00000000000000000000000000000000) to leave the keys in default (empty) state.
- - GENERATE KEY button fill the keys randomly. If there is "debug" in the card name, a debug set of keys is filled instead.
+ - Set to 16 bytes of 0s (00000000000000000000000000000000) to leave the keys in default (empty) state (this is unsecure).
+ - GENERATE KEY button fill the keys randomly.
- Click CREATE CARD button
-- Click the QR code button next to a card to view its details. Backup the keys! You can scan the QR code with the Android app to import the keys.
-- Click the "KEYS / AUTH LINK" button to copy the auth URL to the clipboard. You can then paste this into the Android app to import the keys.
-- Tap the NFC card to write the keys to the card.
+- Click the QR code button next to a card to view its details. Backup the keys now! They'll be comfortable in your password manager.
+ - Now you can scan the QR code with the Android app (Create Bolt Card -> SCAN QR CODE).
+ - Or you can Click the "KEYS / AUTH LINK" button to copy the auth URL to the clipboard. Then paste it into the Android app (Create Bolt Card -> PASTE AUTH URL).
+- Click WRITE CARD NOW and tap the NFC card to set it up. DO NOT REMOVE THE CARD PREMATURELY!
## Setting the card - computer (hard way)
@@ -69,4 +68,4 @@ Then fill up the card parameters in the extension. Card Auth key (K0) can be fil
- Save & Write
- Scan with compatible Wallet
-This app afaik cannot change the keys. If you cannot change them any other way, leave them empty in the extension dialog and remember you're not secure. Card Auth key (K0) can be omitted anyway. Initical counter can be 0.
+This app afaik cannot change the keys. If you cannot change them any other way, leave them empty in the extension dialog and remember you're not secured. Card Auth key (K0) can be omitted anyway. Initical counter can be 0.
|
Create a dedicated ResolvedExpression subclass for .is_visible_from
TN: | @@ -295,10 +295,17 @@ def is_visible_from(self, referenced_env, base_env):
:param AbstractExpression referenced_env: The environment referenced
from base_env, for which we want to check visibility.
"""
- return CallExpr('Is_Visible', 'Is_Visible_From', T.BoolType,
+ return IsVisibleFromExpr(referenced_env, base_env, abstract_expr=self)
+
+
+class IsVisibleFromExpr(CallExpr):
+ def __init__(self, referenced_env, base_env, abstract_expr=None):
+ super(IsVisibleFromExpr, self).__init__(
+ 'Is_Visible', 'Is_Visible_From', T.BoolType,
[construct(referenced_env, T.LexicalEnvType),
construct(base_env, T.LexicalEnvType)],
- abstract_expr=self)
+ abstract_expr=abstract_expr
+ )
@auto_attr
|
Fix typo in error message
"The package_file '+ package_file + ' should ends with..." replaced by "The package_file '+ package_file + ' should end with..." | @@ -381,7 +381,7 @@ class Client(object):
with open(package_file, 'r') as f:
return yaml.safe_load(f)
else:
- raise ValueError('The package_file '+ package_file + ' should ends with one of the following formats: [.tar.gz, .tgz, .zip, .yaml, .yml]')
+ raise ValueError('The package_file '+ package_file + ' should end with one of the following formats: [.tar.gz, .tgz, .zip, .yaml, .yml]')
def list_pipelines(self, page_token='', page_size=10, sort_by=''):
"""List pipelines.
|
SceneInspector : Make inheritance/history views more discoverable
This is another useful feature that users often don't know about. | @@ -785,7 +785,13 @@ class DiffRow( Row ) :
diffWidget.contextMenuSignal().connect( Gaffer.WeakMethod( self.__contextMenu ) ),
] )
- GafferUI.Spacer( IECore.V2i( 0 ), parenting = { "expand" : True } )
+ GafferUI.Spacer( IECore.V2i( 1, 20 ), parenting = { "expand" : True } )
+
+ GafferUI.MenuButton(
+ image = "gear.png",
+ hasFrame = False,
+ menu = GafferUI.Menu( Gaffer.WeakMethod( self.__menuDefinition ) )
+ )
self.__inspector = inspector
self.__diffCreator = diffCreator
@@ -828,16 +834,17 @@ class DiffRow( Row ) :
self.__menu = GafferUI.Menu( IECore.curry( Gaffer.WeakMethod( self.__menuDefinition ), widget ) )
self.__menu.popup()
- def __menuDefinition( self, widget ) :
+ def __menuDefinition( self, widget = None ) :
diff = self.__diff()
- if isinstance( diff, SideBySideDiff ) :
+ if isinstance( diff, SideBySideDiff ) and widget is not None :
# For SideBySideDiffs, we know which target the user has clicked on
# and only present menu items for that target.
targets = [ self.__targets[ 0 if widget is diff.getValueWidget( 0 ) else 1 ] ]
else :
# But for other Diff types we don't know, and so present menu items
- # for any target which has a value.
+ # for any target which has a value. The same applies when the user
+ # has raised the menu via the tool button rather than a right click.
targets = [ t for i, t in enumerate( self.__targets ) if self.__values[i] is not None ]
m = IECore.MenuDefinition()
|
Disables TestNN.test_CTCLoss_1d_target
Summary:
A variant of this test is flaky in CI. See
This disables the entire test until a fix is determined.
Pull Request resolved: | @@ -3277,19 +3277,21 @@ new_criterion_tests = [
check_gradgrad=False,
check_half=False,
),
- dict(
- module_name='CTCLoss',
- desc='1d_target',
- constructor_args=(14,), # blank=14
- extra_args=([50, 50, 50], [30, 25, 20]), # input_lengths, target_lengths
- input_fn=lambda: torch.randn(50, 3, 15).log_softmax(2),
- target_fn=lambda: torch.randint(0, 14, (3, 30), dtype=torch.long),
- reference_fn=lambda i, t, il, tl, m:
- ctcloss_reference(i, t, il, tl, blank=14, reduction=get_reduction(m)),
- check_sum_reduction=True,
- check_gradgrad=False,
- check_half=False,
- ),
+ # Test is flaky
+ # See https://github.com/pytorch/pytorch/issues/29380.
+ # dict(
+ # module_name='CTCLoss',
+ # desc='1d_target',
+ # constructor_args=(14,), # blank=14
+ # extra_args=([50, 50, 50], [30, 25, 20]), # input_lengths, target_lengths
+ # input_fn=lambda: torch.randn(50, 3, 15).log_softmax(2),
+ # target_fn=lambda: torch.randint(0, 14, (3, 30), dtype=torch.long),
+ # reference_fn=lambda i, t, il, tl, m:
+ # ctcloss_reference(i, t, il, tl, blank=14, reduction=get_reduction(m)),
+ # check_sum_reduction=True,
+ # check_gradgrad=False,
+ # check_half=False,
+ # ),
dict(
module_name='CTCLoss',
desc='2d_int_target',
|
Add experimental GPUTreeSHAP to API doc
Authors:
- Philip Hyunsu Cho (https://github.com/hcho3)
Approvers:
- Dante Gama Dessavre (https://github.com/dantegd)
URL: | @@ -584,3 +584,8 @@ Linear Models
-------------
.. autoclass:: cuml.experimental.linear_model.Lars
:members:
+
+Model Explainability
+--------------------
+.. autoclass:: cuml.experimental.explainer.TreeExplainer
+ :members:
|
Modified get_total_followers_or_followings
Use `with StringIO` if `to_file` is `None` | @@ -586,6 +586,8 @@ class API(object):
usernames=False,
to_file=None,
overwrite=False):
+ from io import StringIO
+
if which == 'followers':
key = 'follower_count'
get = self.get_user_followers
@@ -624,8 +626,7 @@ class API(object):
get(user_id, next_max_id)
last_json = self.last_json
try:
- if to_file is not None:
- f = open(to_file, 'a')
+ with open(to_file, 'a') if to_file is not None else StringIO() as f:
for item in last_json["users"]:
if filter_private and item['is_private']:
continue
@@ -651,8 +652,6 @@ class API(object):
print(msg.format(sleep_time / 60))
time.sleep(sleep_time)
sleep_track = 0
- if to_file is not None:
- f.close()
if not last_json["users"] or len(result) >= total:
return result[:total]
except Exception as e:
|
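The rewrite leans on the fact that both file objects and `io.StringIO` are context managers, so a single `with` statement covers the "write to file" and "no file given" cases and still closes the file when one was opened. A small standalone sketch of the pattern (function and file names are illustrative):

```python
from io import StringIO

def dump_names(names, to_file=None):
    # Either destination supports the context-manager protocol, so one
    # `with` block handles both cases and closes the file if one was opened.
    with open(to_file, "a") if to_file is not None else StringIO() as f:
        for name in names:
            f.write(name + "\n")

dump_names(["alice", "bob"])                     # buffered in memory, then discarded
dump_names(["alice", "bob"], to_file="out.txt")  # appended to a file on disk
```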
Fixed name of global variable
Changed the name ssl_port to DD_PORT | @@ -36,12 +36,6 @@ metadata = {
"ddsourcecategory": "aws",
}
-
-try:
- ssl_port = os.environ['DD_PORT']
-except Exception:
- ssl_port = 10516
-
cloudtrail_regex = re.compile('\d+_CloudTrail_\w{2}-\w{4,9}-\d_\d{8}T\d{4}Z.+.json.gz$', re.I)
@@ -49,6 +43,10 @@ DD_SOURCE = "ddsource"
DD_CUSTOM_TAGS = "ddtags"
DD_SERVICE = "service"
DD_URL = os.getenv("DD_URL", default="lambda-intake.logs.datadoghq.com")
+try:
+ DD_PORT = os.environ['DD_PORT']
+except Exception:
+ DD_PORT = 10516
# Pass custom tags as environment variable, ensure comma separated, no trailing comma in envvar!
@@ -67,7 +65,7 @@ def lambda_handler(event, context):
)
# Attach Datadog's Socket
- s = connect_to_datadog(DD_URL, ssl_port)
+ s = connect_to_datadog(DD_URL, DD_PORT)
# Add the context to meta
if "aws" not in metadata:
@@ -116,7 +114,7 @@ def safe_submit_log(s, log):
send_entry(s, log)
except Exception as e:
# retry once
- s = connect_to_datadog(DD_URL, ssl_port)
+ s = connect_to_datadog(DD_URL, DD_PORT)
send_entry(s, log)
return s
|
Update android_roamingmantis.txt
Too similar. | @@ -12446,9 +12446,9 @@ www\.[a-z]{1}\-[a-z]{1,3}\.top$
www\.[a-z]{2}\-[a-z]{2,3}\.(top|club)$
apple\-icloud\.[a-z]{3}\-japan\.com
\b(au|cat|cloudsbox|epos|jibun|jnb|jppost|
-kinggate|kuroneko|mailsa|mizuho|myau|mydocomo|mufg|jibun|netbk|nittsu|nttdocomo|post|poste|rakuten|nzpost|sagawa|samurai|sasekr|smbc|softbank|starbank|suyan|upsp|yamato)\-[a-z]{1,4}\.(cn|co|club|com|top)$
+kinggate|kuroneko|mailsa|mizuho|myau|mydocomo|mufg|jibun|netbk|nittsu|nttdocomo|post|poste|rakuten|nzpost|sagawa|samurai|sasekr|smbc|[i]?softbank|starbank|suyan|upsp|yamato)\-[a-z]{1,4}\.(cn|co|club|com|top)$
\b[a-z]{1,3}\-(cloudsbox|epos|jibun|jnb|jppost|
-kinggate|kuroneko|mailsa|mizuho|myau|mydocomo|mufg|jibun|netbk|nittsu|nttdocomo|nzpost|post|poste|rakuten|sagawa|samurai|sasekr|smbc|softbank|starbank|suyan|upsp|yamato)\.(cn|co|club|com|top)$
+kinggate|kuroneko|mailsa|mizuho|myau|mydocomo|mufg|jibun|netbk|nittsu|nttdocomo|nzpost|post|poste|rakuten|sagawa|samurai|sasekr|smbc|[i]?softbank|starbank|suyan|upsp|yamato)\.(cn|co|club|com|top)$
/phoneyzm.php
# APK trails
@@ -12849,3 +12849,10 @@ pot-i.top
http://45.137.183.50
postnord.top
+
+# Reference: https://twitter.com/KesaGataMe0/status/1292727294209847296
+
+cs-account-repair-info-softbank.com
+isoftbank-japan.com
+isoftbank-jp.com
+softbanknetwork.com
|
Image: Catch glance image not found exception
If we run a container from a glance image uuid, zun will fail with a 500
error.
Fix this by catching the NotFound exception while trying to get the image by uuid
Closes-Bug: | # See the License for the specific language governing permissions and
# limitations under the License.
+from glanceclient.common import exceptions as glance_exceptions
from oslo_utils import uuidutils
from zun.common import clients
@@ -45,9 +46,13 @@ def find_images(context, image_ident, exact_match):
glance = create_glanceclient(context)
if uuidutils.is_uuid_like(image_ident):
images = []
+ try:
image = glance.images.get(image_ident)
if image.container_format == 'docker':
images.append(image)
+ except glance_exceptions.NotFound:
+ # ignore exception
+ pass
else:
filters = {'container_format': 'docker'}
images = list(glance.images.list(filters=filters))
|
fix line mode for circle model
mode='line' was not working before and it made the example fail. | @@ -13,7 +13,7 @@ class Circle(Mesh):
self.vertices.append(point.world_position)
if mode == 'line': # add the first point to make the circle whole
- self.vertices.append(verts[0])
+ self.vertices.append(self.vertices[0])
destroy(origin)
super().__init__(vertices=self.vertices, mode=mode, **kwargs)
|
Fix formatting of the changelog
RTD didn't render it properly previously.
Refs | Changelog
=========
+===========
Development
===========
- (Fill this out as you fix issues and develop your features).
@@ -19,16 +20,19 @@ Development
- ``ListField`` now accepts an optional ``max_length`` parameter. #2110
- The codebase is now formatted using ``black``. #2109
+=================
Changes in 0.18.2
=================
- Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097
- Various code clarity and documentation improvements.
+=================
Changes in 0.18.1
=================
- Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082
- Add Python 3.7 to Travis CI. #2058
+=================
Changes in 0.18.0
=================
- Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2.
@@ -52,6 +56,7 @@ Changes in 0.18.0
- ``disconnect`` now clears the cached attribute ``Document._collection``.
- BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552
+=================
Changes in 0.17.0
=================
- POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976
|
(new-config-parsing-9) Fix Alex's bash bug
Summary:
This PR confirms that config mapping
bug is fixed by this stack.
Depends on D1674
Test Plan: BK
Reviewers: alangenfeld, nate
Subscribers: alangenfeld | import pytest
from dagster_bash import bash_command_solid, bash_script_solid
-from dagster import DagsterExecutionStepExecutionError, execute_solid
+from dagster import DagsterExecutionStepExecutionError, composite_solid, execute_solid
def test_bash_command_solid():
@@ -47,3 +47,22 @@ def test_bash_script_solid():
environment_dict={'solids': {'foobar': {'config': {'env': {'MY_ENV_VAR': 'foobar'}}}}},
)
assert result.output_values == {'result': 'this is a test message: foobar'}
+
+
+def test_bash_script_solid_no_config():
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ solid = bash_script_solid(os.path.join(script_dir, 'test.sh'), name='foobar')
+ result = execute_solid(solid)
+ assert result.output_values == {'result': 'this is a test message:'}
+
+
+def test_bash_script_solid_no_config_composite():
+ script_dir = os.path.dirname(os.path.abspath(__file__))
+ solid = bash_script_solid(os.path.join(script_dir, 'test.sh'), name='foobar')
+
+ @composite_solid(config={}, config_fn=lambda _ctx, cfg: {})
+ def composite():
+ return solid()
+
+ result = execute_solid(composite)
+ assert result.output_values == {'result': 'this is a test message:'}
|
Updated windows.py message when Wix is not found.
Added recommendation to set environment variable when Wix Toolset is installed but not found. | @@ -171,11 +171,11 @@ class windows(app):
print(" * Looking for WiX Toolset...")
wix_path = os.getenv('WIX')
if not wix_path:
- print("Couldn't find WiX Toolset. Please visit:")
+ print("Couldn't find WiX Toolset. Please install the latest stable release from:")
print()
print(" http://wixtoolset.org/releases/")
print()
- print("and install the latest stable release.")
+ print("If already installed, set the WIX environment variable\nto the installation path.")
sys.exit(-2)
else:
print(" - Using {}".format(wix_path))
|
[contrib] Skip peer_memory test if world_size is not a multiple of 2
when world_size < 1 or world_size is odd | @@ -284,6 +284,11 @@ class TestPeerMemory(NcclDistributedTestBase):
def world_size(self) -> int:
return min(torch.cuda.device_count(), 2)
+ # TODO(crcrpar): Check if `world_size` being multiple of 2 is must.
+ def _check_world_size_and_may_skip(self) -> None:
+ if not (self.world_size >= 2 and self.world_size % 2 == 0):
+ self.skipTest(f"world_size is expected to be a multiple of 2 but, {self.world_size}")
+
def get_halo_excnahger_1d(self):
peer_ranks = [i for i in range(self.world_size)]
pool = PeerMemoryPool(64 * 1024, 2 * 1024 * 1024, peer_ranks)
@@ -291,6 +296,7 @@ class TestPeerMemory(NcclDistributedTestBase):
return halo_exchanger_1d
def test_height_split(self):
+ self._check_world_size_and_may_skip()
H_split_tests(
1,
64,
@@ -304,6 +310,7 @@ class TestPeerMemory(NcclDistributedTestBase):
)
def test_width_split(self):
+ self._check_world_size_and_may_skip()
W_split_tests(
1,
64,
|
Catch TypeError when not all required arguments are passed to a runner. Display error and usage.
Conflicts:
* salt/client/mixins.py | @@ -383,7 +383,11 @@ class SyncClientMixin(object):
# Initialize a context for executing the method.
with tornado.stack_context.StackContext(self.functions.context_dict.clone):
- data['return'] = self.functions[fun](*args, **kwargs)
+ func = self.functions[fun]
+ try:
+ data['return'] = func(*args, **kwargs)
+ except TypeError as exc:
+ data['return'] = '\nPassed invalid arguments: {0}\n\nUsage:\n{1}'.format(exc, func.__doc__)
try:
data['success'] = self.context.get('retcode', 0) == 0
except AttributeError:
|
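The approach works because calling a Python function with missing or unexpected arguments raises `TypeError` before the body runs, so the function's docstring can be surfaced as usage help. A minimal sketch of that idea (runner name and docstring are made up):

```python
def runner(name, timeout=30):
    """usage: runner <name> [timeout=30]"""
    return "running {0} for {1}s".format(name, timeout)

def call(func, *args, **kwargs):
    try:
        return func(*args, **kwargs)
    except TypeError as exc:
        return "\nPassed invalid arguments: {0}\n\nUsage:\n{1}".format(exc, func.__doc__)

print(call(runner))            # missing 'name' -> error message plus usage text
print(call(runner, "backup"))  # -> running backup for 30s
```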
Add issues to backlog method added
Implemented similarly to the add issue to sprint method. | @@ -3588,6 +3588,21 @@
# Agile(Formerly Greenhopper) REST API implements
# Resource: https://docs.atlassian.com/jira-software/REST/7.3.1/
#######################################################################
+ def add_issues_to_backlog(self, sprint_id, issues):
+ """
+ Adding Issue(s) to Backlog
+ :param issues: list: List of Issue Keys
+ eg. ['APA-1', 'APA-2']
+ :return: Dictionary of response received from the API
+
+ https://docs.atlassian.com/jira-software/REST/8.9.0/#agile/1.0/backlog-moveIssuesToBacklog
+ """
+ if not isinstance(issues, list):
+ raise ValueError("`issues` param should be List of Issue Keys")
+ url = "/rest/agile/1.0/backlog/issue"
+ data = dict(issues=issues)
+ return self.post(url, data=data)
+
def get_all_agile_boards(self, board_name=None, project_key=None, board_type=None, start=0, limit=50):
"""
Returns all boards. This only includes boards that the user has permission to view.
|
Improve efficiency of storage cleaning in mixed media envs -
documentation
Change
improved efficiency of storage cleaning in hybrid NVMe + HDD
environments by adding `erase_devices_express` clean step. This is a
follow up change adding the documentation for this feature.
Story:
Task: 43498 | @@ -73,6 +73,60 @@ cleaning steps.
See `How do I change the priority of a cleaning step?`_ for more information.
+Storage cleaning options
+------------------------
+
+Clean steps specific to storage are ``erase_devices``,
+``erase_devices_metadata`` and (added in Yoga) ``erase_devices_express``.
+
+``erase_devices`` aims to ensure that the data is removed in the most secure
+way available. On devices that support hardware assisted secure erasure
+(many NVMe and some ATA drives) this is the preferred option. If
+hardware-assisted secure erasure is not available and if
+``[deploy]/continue_if_disk_secure_erase_fails`` is set to ``True``, cleaning
+will fall back to using ``shred`` to overwrite the contents of the device.
+Otherwise cleaning will fail. It is important to note that ``erase_devices``
+may take a very long time (hours or even days) to complete, unless fast,
+hardware assisted data erasure is supported by all the devices in a system.
+Generally, it is very difficult (if possible at all) to recover data after
+performing cleaning with ``erase_devices``.
+
+``erase_devices_metadata`` clean step doesn't provide as strong assurance
+of irreversible destruction of data as ``erase_devices``. However, it has the
+advantage of a reasonably quick runtime (seconds to minutes). It operates by
+destroying metadata of the storage device without erasing every bit of the
+data itself. Attempts of restoring data after running
+``erase_devices_metadata`` may be successful but would certainly require
+relevant expertise and specialized tools.
+
+Lastly, ``erase_devices_express`` combines some of the perks of both
+``erase_devices`` and ``erase_devices_metadata``. It attempts to utilize
+hardware assisted data erasure features if available (currently only NVMe
+devices are supported). In case hardware-asssisted data erasure is not
+available, it falls back to metadata erasure for the device (which is
+identical to ``erase_devices_metadata``). It can be considered a
+time optimized mode of storage cleaning, aiming to perform as thorough
+data erasure as it is possible within a short period of time.
+This clean step is particularly well suited for environments with hybrid
+NVMe-HDD storage configuration as it allows fast and secure erasure of data
+stored on NVMes combined with equally fast but more basic metadata-based
+erasure of data on HDDs.
+``erase_devices_express`` is disabled by default. In order to use it, the
+following configuration is recommended.
+
+.. code-block:: ini
+
+ [deploy]/erase_devices_priority=0
+ [deploy]/erase_devices_metadata_priority=0
+ [conductor]/clean_step_priority_override=deploy.erase_devices_express:5
+
+This ensures that ``erase_devices`` and ``erase_devices_metadata`` are
+disabled so that storage is not cleaned twice and then assigns a non-zero
+priority to ``erase_devices_express``, hence enabling it. Any non-zero
+priority specified in the priority override will work.
+
+Also `[deploy]/enable_nvme_secure_erase` should not be disabled (it is on by default).
+
.. show-steps::
:phase: cleaning
|
copy to __deepcopy__
Change from overriding `copy` to `__deepcopy__` as deepcopy is used in
link `chainer/link.py:435` | @@ -9,6 +9,7 @@ from chainermn.functions import batch_normalization as \
chainermn_batch_normalization
import numpy
+import copy
class MultiNodeBatchNormalization(link.Link):
@@ -130,14 +131,14 @@ class MultiNodeBatchNormalization(link.Link):
"""
self.N = 0
- def copy(self, mode='share'):
+ def __deepcopy__(self, memo):
to_be_preserved = ['comm']
preserved = {}
for name in to_be_preserved:
preserved[name] = getattr(self, name)
setattr(self, name, None)
- ret = super(MultiNodeBatchNormalization, self).copy(mode)
+ ret = copy.deepcopy(super(MultiNodeBatchNormalization, self))
for name in to_be_preserved:
setattr(self, name, preserved[name])
|
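The underlying idiom is a custom `__deepcopy__(self, memo)` hook that shares attributes which must not be duplicated (here the MPI communicator) while deep-copying everything else; since the commit notes that `chainer/link.py` goes through `deepcopy`, defining the hook is enough for it to be picked up. A generic sketch of the idiom, independent of Chainer, with illustrative class and attribute names:

```python
import copy

class CommAwareLayer:
    def __init__(self, comm, weights):
        self.comm = comm        # e.g. an MPI communicator: must be shared, not copied
        self.weights = weights  # ordinary state: safe to deep-copy

    def __deepcopy__(self, memo):
        clone = self.__class__.__new__(self.__class__)
        memo[id(self)] = clone  # register early so cyclic references resolve to the clone
        for name, value in self.__dict__.items():
            if name == "comm":
                setattr(clone, name, value)  # share the communicator
            else:
                setattr(clone, name, copy.deepcopy(value, memo))
        return clone

layer = CommAwareLayer(comm=object(), weights=[1.0, 2.0])
clone = copy.deepcopy(layer)
print(clone.weights == layer.weights, clone.weights is layer.weights)  # True False
print(clone.comm is layer.comm)                                        # True
```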
2021 Hyundai Sonata N Line: Fingerprint
* 2021 Hyundai Sonata N Line: Fingerprint
* Force FPv2: 2021 Hyundai Sonata N Line
* Revert "Force FPv2: 2021 Hyundai Sonata N Line"
This reverts commit
* remove too short fw versions | @@ -220,6 +220,7 @@ FW_VERSIONS = {
b'\xf1\x00DN8_ SCC FHCUP 1.00 1.01 99110-L1000 ',
b'\xf1\x00DN89110-L0000 \xaa\xaa\xaa\xaa\xaa\xaa\xaa \xf1\xa01.00\xaa\xaa\xaa\xaa\xaa\xaa\xaa\x00\x00\x00',
b'\xf1\x00DN8 1.00 99110-L0000 \xaa\xaa\xaa\xaa\xaa\xaa\xaa \xf1\xa01.00\xaa\xaa\xaa',
+ b'\xf1\x00DN8 1.00 99110-L0000 \xaa\xaa\xaa\xaa\xaa\xaa\xaa\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.esp, 0x7d1, None): [
b'\xf1\x00DN ESC \a 106 \a\x01 58910-L0100',
@@ -244,6 +245,7 @@ FW_VERSIONS = {
b'\xf1\x87391162M013\xf1\xa00240',
b'HM6M1_0a0_F00',
b'HM6M2_0a0_BD0',
+ b'HM6M1_0a0_G20',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00DN8 MDPS C 1.00 1.01 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 4DNAC101',
|
inte-tests: enable external ssl
In cloudify-cosmo/cloudify-manager-install#1323 we made docker managers
not use external ssl by default. But in these tests, we do expect
the client to be using ssl. So let's enable it. | @@ -12,6 +12,7 @@ def run_manager(image, service_management, resource_mapping=None,):
manager:
security:
admin_password: admin
+ ssl_enabled: true
validations:
skip_validations: true
sanity:
|
Update avcodecs.py
could it be that easy | @@ -404,7 +404,7 @@ class VideoCodec(BaseCodec):
vfstring = ""
for line in vf:
- vfstring = "%s;%s" % (vfstring, line)
+ vfstring = "%s,%s" % (vfstring, line)
optlist.extend(['-vf', vfstring[1:]])
|
Added jupyter-server-proxy to install_requires
Need jupyter-server-proxy to setup websocket connections on JupyterHub and Binder. | @@ -18,7 +18,7 @@ except ImportError:
import versioneer
-install_requires = ['jupyter', 'numpy', 'ipykernel',
+install_requires = ['jupyter', 'jupyter-server-proxy', 'numpy', 'ipykernel',
'autobahn>=18.8.2']
if sys.version_info.major == 3 and sys.version_info.minor >= 5:
|
Increase timeout for taskcat version check
Set timeout 5 seconds
Fixes: | @@ -147,7 +147,7 @@ def get_pip_version(url):
"""
Given the url to PypI package info url returns the current live version
"""
- return requests.get(url, timeout=0.1).json()["info"]["version"]
+ return requests.get(url, timeout=5.0).json()["info"]["version"]
def get_installed_version():
|
Prepare 2.0.1rc4 (again).
The release of `2.0.1rc4` was paused while some broken windows were fixed. It can now be resumed.
[ci skip-rust] | @@ -6,12 +6,24 @@ This document describes releases leading up to the ``2.0.x`` ``stable`` series.
See https://www.pantsbuild.org/v2.0/docs/release-notes-2-0 for an overview of the changes in this release, and
https://www.pantsbuild.org/docs/plugin-upgrade-guide for a plugin upgrade guide.
-2.0.1rc4 (12/09/2020)
+2.0.1rc4 (12/16/2020)
---------------------
Bugfixes
~~~~~~~~
+* Fix filtering of log messages generated in native code. (cherrypick of #11313) (#11318)
+ `PR #11318 <https://github.com/pantsbuild/pants/pull/11318>`_
+
+* Upgrade to Pex 2.1.24 to fix macOS Big Sur. (cherrypick of #11312) (#11314)
+ `PR #11314 <https://github.com/pantsbuild/pants/pull/11314>`_
+
+* Clean the graph speculatively, and cancel nodes when interest is lost (cherrypick of #11308) (#11310)
+ `PR #11310 <https://github.com/pantsbuild/pants/pull/11310>`_
+
+* Implement native Process cache scoping to fix --test-force (cherrypick of #11291) (#11299)
+ `PR #11299 <https://github.com/pantsbuild/pants/pull/11299>`_
+
* Revert "Move graph cycle detection to rust. (#11202)" (cherrypick of #11272) (#11277)
`PR #11202 <https://github.com/pantsbuild/pants/pull/11202>`_
`PR #11277 <https://github.com/pantsbuild/pants/pull/11277>`_
|
Update README.md
Added 2020 Honda Accord Hybrid | @@ -67,7 +67,7 @@ Supported Cars
| Acura | ILX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 25mph |
| Acura | RDX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Accord 2018-19 | All | Stock | 0mph | 3mph |
-| Honda | Accord Hybrid 2018-19 | All | Stock | 0mph | 3mph |
+| Honda | Accord Hybrid 2018-20 | All | Stock | 0mph | 3mph |
| Honda | Civic Hatchback 2017-19 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | Civic Sedan/Coupe 2016-18 | Honda Sensing | openpilot | 0mph | 12mph |
| Honda | Civic Sedan/Coupe 2019-20 | Honda Sensing | Stock | 0mph | 2mph<sup>2</sup> |
|
feat: better error message for missing mesh manifests
Now prints when using vol.save_mesh(...):
Segment ID(s) $SEGIDS are missing corresponding mesh manifests.
Aborted. | @@ -1029,13 +1029,17 @@ class CloudVolume(object):
yield chunkimg, spt, ept
+ def _mesh_manifest_path(self, segid):
+ mesh_dir = self.info['mesh']
+ mesh_json_file_name = str(segid) + ':0'
+ return os.path.join(mesh_dir, mesh_json_file_name)
+
def get_mesh(self, segid):
"""Download the raw mesh fragments for this seg ID."""
- mesh_dir = self.info['mesh']
+ mesh_dir = self.info['mesh']
mesh_json_file_name = str(segid) + ':0'
-
- download_path = os.path.join(mesh_dir, mesh_json_file_name)
+ download_path = self._mesh_manifest_path(segid)
with Storage(self.layer_cloudpath, progress=self.progress) as stor:
fragments = json.loads(stor.get_file(download_path))['fragments']
@@ -1049,6 +1053,19 @@ class CloudVolume(object):
frag_datas = stor.get_files(paths)
return frag_datas
+ def _mesh_check_missing(self, segids):
+ """Check if there are any missing mesh manifests prior to downloading."""
+ manifest_paths = [ self._mesh_manifest_path(segid) for segid in segids ]
+ with Storage(self.layer_cloudpath, progress=self.progress) as stor:
+ exists = stor.files_exist(manifest_paths)
+
+ dne = []
+ for path, there in exists.items():
+ if not there:
+ (segid,) = re.search('(\d+):0$', path).groups()
+ dne.append(segid)
+ return dne
+
def save_mesh(self, segids, file_format='obj'):
"""
Save one or more segids into a common mesh format as a single file.
@@ -1060,6 +1077,16 @@ class CloudVolume(object):
if type(segids) != list:
segids = [ segids ]
+ dne = self._mesh_check_missing(segids)
+
+ if len(dne) > 0:
+ missing = ', '.join([ str(segid) for segid in dne ])
+ print(red(
+ 'Segment ID(s) {} are missing corresponding mesh manifests.\nAborted.' \
+ .format(missing)
+ ))
+ sys.exit()
+
fragments = []
for segid in segids:
fragments.extend( self.get_mesh(segid) )
|
Standalone: Fixup QML path for at least Linux
* Was working on Windows, but we better enforce the path, because on some Linux
platforms, this wasn't working. | @@ -452,6 +452,12 @@ QCoreApplication.setLibraryPaths(
)
]
)
+
+os.environ["QML2_IMPORT_PATH"] = os.path.join(
+ os.path.dirname(__file__),
+ "qml"
+)
+
""" % {
"package_name": full_name
}
|
left_sidebar: Move "Recent topics" higher in the sidebar.
This should increase its visual priority in the UI. We plan to move
"Private messages" to a different component more similar to STREAMS
soon.
Fixes | </a>
<span class="arrow all-messages-sidebar-menu-icon hidden-for-spectators"><i class="zulip-icon zulip-icon-ellipsis-v-solid" aria-hidden="true"></i></span>
</li>
+ <li class="top_left_recent_topics top_left_row" title="{{t 'Recent topics' }} (t)">
+ <a href="#recent_topics">
+ <span class="filter-icon">
+ <i class="fa fa-clock-o" aria-hidden="true"></i>
+ </span>
+ {{~!-- squash whitespace --~}}
+ <span>{{t 'Recent topics' }}</span>
+ </a>
+ </li>
<li class="top_left_private_messages hidden-for-spectators">
<div class="private_messages_header top_left_row" title="{{t 'Private messages' }} (P)">
<a href="#narrow/is/private">
</a>
<span class="arrow drafts-sidebar-menu-icon"><i class="zulip-icon zulip-icon-ellipsis-v-solid" aria-hidden="true"></i></span>
</li>
- <li class="top_left_recent_topics top_left_row" title="{{t 'Recent topics' }} (t)">
- <a href="#recent_topics">
- <span class="filter-icon">
- <i class="fa fa-clock-o" aria-hidden="true"></i>
- </span>
- {{~!-- squash whitespace --~}}
- <span>{{t 'Recent topics' }}</span>
- </a>
- </li>
</ul>
<div id="streams_list" class="zoom-out">
<div id="streams_header" class="zoom-in-hide"><h4 class="sidebar-title" data-tippy-content="{{t 'Filter streams' }} (q)">{{t 'STREAMS' }}</h4>
|
Update conf.json
fixed azure WAF test's instance | },
{
"integrations": "AzureWAF",
+ "instance_names": "azure_waf_prod",
"playbookID": "Azure WAF - Test",
"fromversion": "5.0.0"
},
|
UI: Data files are an option also for module mode
* This is for commercial users only, since it will require embedding
them, but the description was misleading to them. | @@ -389,7 +389,7 @@ times. Default empty.""",
parser.add_option_group(follow_group)
-data_group = OptionGroup(parser, "Data files for standalone/onefile mode")
+data_group = OptionGroup(parser, "Data files")
data_group.add_option(
"--include-package-data",
|
[dagit] Correct Runs top nav icon
Summary: I accidentally had the wrong icon in place for the top nav on `/instance/runs`.
Test Plan: View page, verify proper icon.
Reviewers: catherinewu | @@ -45,7 +45,7 @@ export const RunsRoot: React.FunctionComponent<RouteComponentProps> = () => {
return (
<RunsQueryRefetchContext.Provider value={{refetch: queryResult.refetch}}>
<ScrollContainer>
- <TopNav breadcrumbs={[{icon: 'outdated', text: 'Runs'}]} />
+ <TopNav breadcrumbs={[{icon: 'history', text: 'Runs'}]} />
<div style={{padding: '16px'}}>
<div
style={{
|
Markdown edit
[ci skip] | [](https://travis-ci.org/urschrei/pyzotero) [](https://coveralls.io/github/urschrei/pyzotero?branch=dev) [](https://pypi.python.org/pypi/Pyzotero/) [](https://pypi.python.org/pypi/Pyzotero/) [](http://pyzotero.readthedocs.org/en/latest/?badge=latest) [](license.txt) [](https://pypi.python.org/pypi/Pyzotero)
-# Quickstart #
+# Quickstart
1. `pip install pyzotero`
2. You'll need the ID of the personal or group library you want to access:
@@ -22,10 +22,12 @@ for item in items:
print('Item: %s | Key: %s') % (item['data']['itemType'], item['data']['key'])
```
-# Documentation #
+# Documentation
+
Full documentation of available Pyzotero methods, code examples, and sample output is available on [Read The Docs][3].
-# Installation #
+# Installation
+
* Using [pip][10]: `pip install pyzotero` (it's available as a wheel, and is tested on Python 2.7, 3.5, 3.6, and 3.7)
* From a local clone, if you wish to install Pyzotero from a specific branch:
@@ -38,19 +40,20 @@ git checkout dev
pip install .
```
-## Testing ##
+## Testing
Run `test_zotero.py` in the [pyzotero/test](test) directory, or, using [Nose2][7], `nose2` from the top-level directory.
-## Issues ##
+## Issues
The latest commits can be found on the [dev branch][9], although new features are currently rare. If you encounter an error, please open an issue.
-## Pull Requests ##
+## Pull Requests
Pull requests are welcomed. Please read the [contribution guidelines](CONTRIBUTING.md). In particular, please **base your PR on the `dev` branch**.
-## Versioning ##
+## Versioning
+
As of v1.0.0, Pyzotero is versioned according to [Semver](http://semver.org); version increments are performed as follows:
@@ -59,7 +62,7 @@ As of v1.0.0, Pyzotero is versioned according to [Semver](http://semver.org); ve
2. MINOR version will increment when functionality is added in a backwards-compatible manner, and
3. PATCH version will increment with backwards-compatible bug fixes.
-# License #
+# License
Pyzotero is licensed under the [MIT license][8]. See [license.txt](license.txt) for details.
|
Have test_tutorial.py copy metadata.staged to metadata
in order to allow testing of client creation script | @@ -280,9 +280,11 @@ class TestTutorial(unittest.TestCase):
repository.writeall()
- # Copying metadata to live repository not done here, as it is not tested
- # or worked with further in the tutorial (so we'd just copy and then
- # delete.)
+ # Simulate the following shell command:
+ ## $ cp -r "repository/metadata.staged/" "repository/metadata/"
+ shutil.copytree(
+ os.path.join('repository', 'metadata.staged'),
+ os.path.join('repository', 'metadata'))
|
update development docs to only recommend fnm
this works better for windows, is faster and avoids confusion by recommending one thing only | @@ -14,7 +14,7 @@ If you'd prefer to set up all the components manually, read on. These instructio
## Setting up the Wagtail codebase
-The preferred way to install the correct version of Node is to use [Node Version Manager (nvm)](https://github.com/nvm-sh/nvm) or [Fast Node Manager (fnm)](https://github.com/Schniz/fnm), which will always align the version with the supplied `.nvmrc` file in the root of the project. To ensure you are running the correct version of Node, run `nvm install` or `fnm install` from the project root.
+The preferred way to install the correct version of Node is to use [Fast Node Manager (fnm)](https://github.com/Schniz/fnm), which will always align the version with the supplied `.nvmrc` file in the root of the project. To ensure you are running the correct version of Node, run `fnm install` from the project root.
Alternatively, you can install [Node.js](https://nodejs.org/) directly, ensure you install the version as declared in the project's root `.nvmrc` file.
You will also need to install the **libjpeg** and **zlib** libraries, if you haven't done so already - see Pillow's [platform-specific installation instructions](https://pillow.readthedocs.io/en/stable/installation.html#external-libraries).
|
Fix 'ia' locale.
This unblocks our docker and travis builds. | @@ -2539,7 +2539,7 @@ msgid "%s responded with %s (%s)."
msgstr "%s ha respondite con %s (%s)."
#, python-format
-msgid "Connection to \"%s\" timed out."
+msgid "Connection to \"%s\" timed out.\n"
msgstr "Connexion a \"%s\" foras tempore limite.\n"
#, python-format
|
Better defaults values in thrift spec
Summary: title | @@ -7,7 +7,7 @@ struct AdditionalFeatureTypes {
struct RLParameters {
1: double gamma = 0.9,
2: double epsilon = 0.1,
- 3: double target_update_rate = 0.01,
+ 3: double target_update_rate = 0.001,
4: i32 reward_burnin = 1,
5: bool maxq_learning = true,
6: map<string, double> reward_boost,
@@ -28,7 +28,7 @@ struct RLParameters {
struct RainbowDQNParameters {
1: bool double_q_learning = true,
- 2: bool dueling_architecture = false,
+ 2: bool dueling_architecture = true,
}
struct CNNParameters {
@@ -53,11 +53,11 @@ struct FactorizationParameters {
}
struct TrainingParameters {
- 1: i32 minibatch_size = 16384,
- 2: double learning_rate = 0.01,
+ 1: i32 minibatch_size = 4096,
+ 2: double learning_rate = 0.001,
3: string optimizer = 'ADAM',
- 4: list<i32> layers = [-1, 512, 256, 128, 1],
- 5: list<string> activations = ['relu', 'relu', 'relu', 'linear'],
+ 4: list<i32> layers = [-1, 256, 128, 1],
+ 5: list<string> activations = ['relu', 'relu', 'linear'],
6: string lr_policy = 'fixed',
7: double lr_decay = 0.999,
8: double dropout_ratio = 0.0,
@@ -116,14 +116,14 @@ struct ContinuousActionModelParameters {
}
struct DDPGNetworkParameters {
- 1: list<i32> layers = [-1, 512, 256, 128, 1],
- 2: list<string> activations = ['relu', 'relu', 'relu', 'tanh'],
+ 1: list<i32> layers = [-1, 256, 128, 1],
+ 2: list<string> activations = ['relu', 'relu', 'tanh'],
3: double l2_decay = 0.01,
4: double learning_rate = 0.001,
}
struct DDPGTrainingParameters {
- 1: i32 minibatch_size = 128,
+ 1: i32 minibatch_size = 2048,
2: double final_layer_init = 0.003,
3: string optimizer = 'ADAM',
4: optional string warm_start_model_path,
@@ -142,7 +142,7 @@ struct DDPGModelParameters {
struct OptimizerParameters {
1: string optimizer = 'ADAM',
- 2: double learning_rate = 0.01,
+ 2: double learning_rate = 0.001,
3: double l2_decay = 0.01,
}
|
org settings: Add typeahead to user group member inputs.
Fixes | @@ -59,6 +59,39 @@ exports.populate_user_groups = function () {
}
}
+ var input = pill_container.children('.input');
+
+ input.typeahead({
+ items: 5,
+ fixed: true,
+ dropup: true,
+ source: people.get_realm_persons,
+ highlighter: function (item) {
+ return typeahead_helper.render_person(item);
+ },
+ matcher: function (item) {
+ if (pills.keys().includes(item.user_id)) {
+ return false;
+ }
+
+ var person = people.get_person_from_user_id(item.user_id);
+ var query = this.query.toLowerCase();
+
+ return (person.email.toLowerCase().indexOf(query) !== -1
+ || person.full_name.toLowerCase().indexOf(query) !== -1);
+ },
+ sorter: function (matches) {
+ return typeahead_helper.sort_recipientbox_typeahead(
+ this.query, matches, "");
+ },
+ updater: function (user) {
+ pills.pill.append(user.full_name, user.user_id);
+ input.text('');
+ update_save_state(pills.keys());
+ },
+ stopAdvance: true,
+ });
+
pills.onPillCreate(function (value, reject) {
var person = people.get_by_email(value);
var draft_group = pills.keys();
|
Add 'IF NOT EXISTS' to prevent duplicate table exception
PR for issue Avoid exception handling for PostgresTarget.create_marker_table() with "CREATE TABLE IF NOT EXISTS" | @@ -207,24 +207,19 @@ class PostgresTarget(luigi.Target):
connection.autocommit = True
cursor = connection.cursor()
if self.use_db_timestamps:
- sql = """ CREATE TABLE {marker_table} (
+ sql = """ CREATE TABLE IF NOT EXISTS {marker_table} (
update_id TEXT PRIMARY KEY,
target_table TEXT,
inserted TIMESTAMP DEFAULT NOW())
""".format(marker_table=self.marker_table)
else:
- sql = """ CREATE TABLE {marker_table} (
+ sql = """ CREATE TABLE IF NOT EXISTS {marker_table} (
update_id TEXT PRIMARY KEY,
target_table TEXT,
inserted TIMESTAMP);
""".format(marker_table=self.marker_table)
- try:
+
cursor.execute(sql)
- except psycopg2.ProgrammingError as e:
- if e.pgcode == psycopg2.errorcodes.DUPLICATE_TABLE:
- pass
- else:
- raise
connection.close()
def open(self, mode):
|
Fix usage of deprecated suffix argument in doc2path
Replace call to doc2path with os.path.join as Sphinx did with their
embedded builders. | @@ -434,7 +434,7 @@ class PDFBuilder(Builder):
if docname not in self.env.all_docs:
yield docname
continue
- targetname = self.env.doc2path(docname, self.outdir, self.out_suffix)
+ targetname = os.path.join(self.outdir, docname + self.out_suffix)
try:
targetmtime = os.path.getmtime(targetname)
except Exception:
|
Update writing.rst
We identified this issue, which was very difficult to track down. Don't think it's really a bug, and likely state names should never contain a hyphen anyways. But basically it renders the name as (first - second) = function(). (first-second.py), and you get a "can't assign to operator" exception. | @@ -153,6 +153,9 @@ distributed manually to minions by running :mod:`saltutil.sync_states
<salt.modules.saltutil.sync_all>`. Alternatively, when running a
:ref:`highstate <running-highstate>` custom types will automatically be synced.
+NOTE: Writing state modules with hyphens in the filename will cause issues
+with !pyobjects routines. Best practice to stick to underscores.
+
Any custom states which have been synced to a minion, that are named the same
as one of Salt's default set of states, will take the place of the default
state with the same name. Note that a state module's name defaults to one based
|
Remove errant create_reloaded_repository_location re-implementation
Summary: This was moved to the base class in so we don't need these re-implementations now.
Test Plan: BK
Reviewers: alangenfeld | @@ -399,9 +399,6 @@ def __init__(self, repository_location_handle):
self.external_repositories = {repo.name: repo for repo in external_repositories_list}
- def create_reloaded_repository_location(self):
- return GrpcServerRepositoryLocation(self._handle)
-
@property
def is_reload_supported(self):
return True
@@ -556,9 +553,6 @@ def __init__(self, repository_location_handle):
repo_list = sync_get_external_repositories(self._handle)
self.external_repositories = {er.name: er for er in repo_list}
- def create_reloaded_repository_location(self):
- return PythonEnvRepositoryLocation(self._handle)
-
@property
def is_reload_supported(self):
return True
|
Zero out all observations if we get any NaNs.
Now we are NaN-proof! | @@ -25,9 +25,14 @@ import numpy as np
def historical_metric_values(history, metric):
"""Converts a metric stream from a trax History object into a numpy array."""
metric_sequence = history.get(*metric)
- return np.array([
+ metric_values = np.array([
metric_value for (_, metric_value) in metric_sequence
])
+ if np.any(np.isnan(metric_values)):
+ # Zero out all observations if any element is NaN. This way the agent
+ # doesn't get any rewards, so it learns to avoid those regions.
+ metric_values[:] = 0.0
+ return metric_values
def metric_to_observation(metric_values, metric_range):
|
File manager context not always active in windows explorer
add Windows-Explorer as application name | @@ -9,6 +9,8 @@ apps.windows_explorer = """
os: windows
and app.name: Windows Explorer
os: windows
+and app.name: Windows-Explorer
+os: windows
and app.exe: explorer.exe
"""
@@ -84,14 +86,17 @@ if app.platform == "windows":
]
[email protected]_class('user')
[email protected]_class("user")
class UserActions:
def file_manager_go_back():
- actions.key('alt-left')
+ actions.key("alt-left")
+
def file_manager_go_forward():
- actions.key('alt-right')
+ actions.key("alt-right")
+
def file_manager_open_parent():
- actions.key('alt-up')
+ actions.key("alt-up")
+
def file_manager_current_path():
path = ui.active_window().title
|
Fix a little typo in engine docstring
Closes | @@ -31,7 +31,7 @@ class Engine(Serializable):
last_event_name: last event name triggered by the engine.
Note:
- :class:`~ignite.engine.engine.Engine` implementation has changed in v0.4.10 with "interrupt/resume" feature.
+ :class:`~ignite.engine.engine.Engine` implementation has changed in v0.5.0 with "interrupt/resume" feature.
Engine may behave differently on certain corner cases compared to the one from v0.4.9 and before.
In such case, you can set ``Engine.interrupt_resume_enabled = False`` to restore previous behaviour.
@@ -1013,7 +1013,7 @@ class Engine(Serializable):
self.should_interrupt = False
yield self.state
- def _run_once_on_dataset_as_gen(self) -> Generator:
+ def _run_once_on_dataset_as_gen(self) -> Generator[State, None, float]:
start_time = time.time()
# We need to setup iter_counter > 0 if we resume from an iteration
|
Update configuration.rst
Added a couple of sentences to explain that creation of a queue is a prerequisite for automatically creating tickets from e-mail. | @@ -19,6 +19,8 @@ Before django-helpdesk will be much use, you need to do some basic configuration
**IMPORTANT NOTE**: Any tickets created via POP3 or IMAP mailboxes will DELETE the original e-mail from the mail server.
+ You will need to create a support queue, and associated login/host values, in the Django admin interface, in order for mail to be picked-up from the mail server and placed in the tickets table of your database. The values in the settings file alone, will not create the necessary values to trigger the get_email function.
+
4. If you wish to automatically escalate tickets based on their age, set up a cronjob to run the escalation command on a regular basis::
0 * * * * /path/to/helpdesksite/manage.py escalate_tickets
|
Add support for FusedBatchNormV3
No changes seem to be needed to _fused_batch_norm. It just works. | @@ -1561,6 +1561,7 @@ _convert_map = {
'FloorMod' : _floormod(),
'FusedBatchNorm' : _fused_batch_norm(),
'FusedBatchNormV2' : _fused_batch_norm(),
+ 'FusedBatchNormV3' : _fused_batch_norm(),
'Gather' : _gather(),
'GatherNd' : _gather_nd(),
'GatherV2' : _gather(),
|
Add information on downloading historical results
Adds documentation to engine docs on how to get previous
results using the program and job id. | @@ -191,3 +191,62 @@ for b in range(num_circuits_in_batch):
idx+=1
```
+
+## Downloading historical results
+
+Results from previous computations are archived and can be downloaded later
+by those in the same cloud project. You must use the same project id to
+access historical results or your request will be denied.
+
+Each time that you run a circuit or sweep, the `Engine` class will generate
+a program id and job id for you. (You can also specify the program and job id
+yourself when running the program). Both the program and job id will need to be
+unique within the project. In order to retrieve previous results,
+you will need both this program id as well as the job id.
+If these were generated by the `Engine`, they can be retrieved from the
+job object when you run a sweep.
+Currently, getting the program and job ids can only be done through the
+`Engine` interface and not through the sampler interface.
+You can then use `get_program` and `get_job` to retrieve the results.
+See below for an example:
+
+```
+# Initialize the engine object
+engine = cirq.google.Engine(project_id='YOUR_PROJECT_ID')
+
+# Create an example circuit
+qubit = cirq.GridQubit(5, 2)
+circuit = cirq.Circuit(
+ cirq.X(qubit)**sympy.Symbol('t'),
+ cirq.measure(qubit, key='result')
+)
+param_sweep = cirq.Linspace('t', start=0, stop=1, length=10)
+
+# Run the circuit
+job = e.run_sweep(program=circuit,
+ params=param_sweep,
+ repetitions=1000,
+ processor_ids=[PROCESSOR_ID],
+ gate_set=GATE_SET)
+
+# Save the program and jo id for later
+program_id = job.program_id
+job_id = job.job_id
+
+# Retrieve the results
+results = job.results()
+
+# ...
+# Some time later, the results can be retrieved
+# ...
+
+# Recreate the job object
+historical_job = engine.get_program(program_id=program_id).get_job(job_id=job_id)
+
+# Retrieve the results
+historical_results = historical_job.results()
+
+```
+
+If you did not save the ids, you can still find them from your
+job using the [Cloud Console](https://console.cloud.google.com/quantum/jobs).
|
`requests`: improve `_Data` type
requests: improve _Data type
This allows to pass an Iterable[bytes] for streaming request data. | from _typeshed import Self, SupportsItems, SupportsRead
from collections.abc import Callable, Iterable, Mapping, MutableMapping
-from typing import IO, Any, Union
+from typing import Any, Union
from typing_extensions import TypeAlias, TypedDict
from urllib3._collections import RecentlyUsedContainer
@@ -45,7 +45,25 @@ class SessionRedirectMixin:
def rebuild_proxies(self, prepared_request, proxies): ...
def should_strip_auth(self, old_url, new_url): ...
-_Data: TypeAlias = str | bytes | Mapping[str, Any] | Iterable[tuple[str, str | None]] | IO[Any]
+_Data: TypeAlias = (
+ # used in requests.models.PreparedRequest.prepare_body
+ #
+ # case: is_stream
+ # see requests.adapters.HTTPAdapter.send
+ # will be sent directly to http.HTTPConnection.send(...) (through urllib3)
+ Iterable[bytes]
+ # case: not is_stream
+ # will be modified before being sent to urllib3.HTTPConnectionPool.urlopen(body=...)
+ # see requests.models.RequestEncodingMixin._encode_params
+ # see requests.models.RequestEncodingMixin._encode_files
+ # note that keys&values are converted from Any to str by urllib.parse.urlencode
+ | str
+ | bytes
+ | SupportsRead[str | bytes]
+ | list[tuple[Any, Any]]
+ | tuple[tuple[Any, Any], ...]
+ | Mapping[Any, Any]
+)
_Auth: TypeAlias = Union[tuple[str, str], _auth.AuthBase, Callable[[PreparedRequest], PreparedRequest]]
_Cert: TypeAlias = Union[str, tuple[str, str]]
# Files is passed to requests.utils.to_key_val_list()
|
Complete the mlqa dataset card
* Added more details to dataset card of mlqa dataset
* added license and other required details to mlqa
* Modified dataset card of mlqa dataset
Changed language creators tag in dataset card of mlqa dataset to crowdsourced. | pretty_name: MLQA (MultiLingual Question Answering)
language:
- en
+- de
+- es
+- ar
+- zh
+- vi
+- hi
+license:
+- cc-by-sa-3.0
+source_datasets:
+- original
+size_categories:
+- 10K<n<100K
+language_creators:
+- crowdsourced
+annotations_creators:
+- crowdsourced
+multilinguality:
+- multilingual
+task_categories:
+- question-answering
+task_ids:
+- extractive-qa
paperswithcode_id: mlqa
---
@@ -54,7 +76,7 @@ paperswithcode_id: mlqa
### Languages
-[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
+MLQA contains QA instances in 7 languages, English, Arabic, German, Spanish, Hindi, Vietnamese and Simplified Chinese.
## Dataset Structure
@@ -230,7 +252,8 @@ The data fields are the same among all splits.
title = {MLQA: Evaluating Cross-lingual Extractive Question Answering},
author = {Lewis, Patrick and Oguz, Barlas and Rinott, Ruty and Riedel, Sebastian and Schwenk, Holger},
journal = {arXiv preprint arXiv:1910.07475},
- year={2019}
+ year = 2019,
+ eid = {arXiv: 1910.07475}
}
```
|
Update elf_mirai.txt
[0]
Standard row of ```{ext}```.
/spc.yakuza
/srv.yakuza
/x86.yakuza
+
+# Reference: https://twitter.com/VessOnSecurity/status/1051226957118103560
+
+/gemini.arm
+/gemini.arm5
+/gemini.arm6
+/gemini.arm7
+/gemini.dbg
+/gemini.i586
+/gemini.i686
+/gemini.m68k
+/gemini.mips
+/gemini.mpsl
+/gemini.ppc
+/gemini.ppc440
+/gemini.sh4
+/gemini.spc
+/gemini.srv
+/gemini.x86
|
Parse infraction search reason as regex before calling site
Previously this would raise an error within site due to the invalid regexp. | +import re
import textwrap
import typing as t
@@ -275,6 +276,11 @@ class ModManagement(commands.Cog):
@infraction_search_group.command(name="reason", aliases=("match", "regex", "re"))
async def search_reason(self, ctx: Context, reason: str) -> None:
"""Search for infractions by their reason. Use Re2 for matching."""
+ try:
+ re.compile(reason)
+ except re.error as e:
+ raise commands.BadArgument(f"Invalid regular expression in `reason`: {e}")
+
infraction_list = await self.bot.api_client.get(
'bot/infractions/expanded',
params={'search': reason}
|
Make `get_default_exporters` a static method of runner class
* the function `get_default_exporters` is only used by `TestRunner`
and its sub classes. Make it a member method of runner class so
the sub classes can overwrite it and accept new config options
which is related to command line arguments. | @@ -37,27 +37,6 @@ from testplan.testing import listing, filtering, ordering, tagging
from testplan.testing.base import TestResult
-def get_default_exporters(config):
- """
- Instantiate certain exporters if related cmdline argument (e.g. --pdf)
- is passed but there aren't any exporter declarations.
- """
- result = []
- if config.pdf_path:
- result.append(test_exporters.PDFExporter())
- if config.report_tags or config.report_tags_all:
- result.append(test_exporters.TagFilteredPDFExporter())
- if config.json_path:
- result.append(test_exporters.JSONExporter())
- if config.xml_dir:
- result.append(test_exporters.XMLExporter())
- if config.http_url:
- result.append(test_exporters.HTTPExporter())
- if config.ui_port is not None:
- result.append(test_exporters.WebServerExporter(ui_port=config.ui_port))
- return result
-
-
def get_exporters(values):
"""
Validation function for exporter declarations.
@@ -322,7 +301,7 @@ class TestRunner(Runnable):
:py:class:`Resources <testplan.exporters.testing.base.Exporter>`.
"""
if self._exporters is None:
- self._exporters = get_default_exporters(self.cfg)
+ self._exporters = self.get_default_exporters()
if self.cfg.exporters:
self._exporters.extend(self.cfg.exporters)
for exporter in self._exporters:
@@ -331,6 +310,28 @@ class TestRunner(Runnable):
exporter.parent = self
return self._exporters
+ def get_default_exporters(self):
+ """
+ Instantiate certain exporters if related cmdline argument (e.g. --pdf)
+ is passed but there aren't any exporter declarations.
+ """
+ exporters = []
+ if self.cfg.pdf_path:
+ exporters.append(test_exporters.PDFExporter())
+ if self.cfg.report_tags or self.cfg.report_tags_all:
+ exporters.append(test_exporters.TagFilteredPDFExporter())
+ if self.cfg.json_path:
+ exporters.append(test_exporters.JSONExporter())
+ if self.cfg.xml_dir:
+ exporters.append(test_exporters.XMLExporter())
+ if self.cfg.http_url:
+ exporters.append(test_exporters.HTTPExporter())
+ if self.cfg.ui_port is not None:
+ exporters.append(
+ test_exporters.WebServerExporter(ui_port=self.cfg.ui_port)
+ )
+ return exporters
+
def add_environment(self, env, resource=None):
"""
Adds an environment to the target resource holder.
|
Adding a unit test for the new changes.
Thanks | @@ -200,3 +200,41 @@ def test_version_check_remote_true_not_available():
expected = {"ack": {"installed": ["3.1.1"]}}
result = chocolatey.version("ack", check_remote=True)
assert result == expected
+
+
+def test_add_source(choco_path):
+ """
+ Test add_source when remote is False
+ """
+ cmd_run_all_mock = MagicMock(return_value={"retcode": 0, "stdout": "data"})
+ cmd_run_which_mock = MagicMock(return_value=choco_path)
+ with patch.dict(chocolatey.__salt__, {"cmd.which": cmd_run_which_mock,
+ "cmd.run_all": cmd_run_all_mock}):
+ expected_call = [
+ choco_path,
+ "sources",
+ "add",
+ "--name",
+ "source_name",
+ "--source",
+ "source_location",
+ ]
+
+ result = chocolatey.add_source("source_name", "source_location")
+ cmd_run_all_mock.assert_called_with(expected_call, python_shell=False)
+
+ expected_call = [
+ choco_path,
+ "sources",
+ "add",
+ "--name",
+ "source_name",
+ "--source",
+ "source_location",
+ "--priority",
+ "priority"
+ ]
+
+ result = chocolatey.add_source(
+ "source_name", "source_location", priority="priority")
+ cmd_run_all_mock.assert_called_with(expected_call, python_shell=False)
|
Update common-single-facility-sign-in.feature
Slight wording changes | Feature: Single facility sign in
- Kolibri users should see their facility name when they sign in
+ Kolibri users need to see the name of the facility they are signing into
Background:
Given there is only one facility on the device
And I am on the sign in page
-
- Scenario: Learner signs in
+ Scenario: Sign in to facility
When I open Kolibri in the browser
Then I see *Sign in to <facility>*
And I see the sign in form
+
+Examples:
+| facility |
+| MySchool |
|
Use npm package for running several subcommands
This approach has the advantage of capturing STDOUT of all the
subcommands | "build-css": "lessc ../css/index.less > ../../static/css/index.css",
"build-config": "browserify src/config.js --standalone config -t envify | uglifyjs > ../../static/js/config.min.js",
"build-clipboardjs": "cp node_modules/clipboard/dist/clipboard.min.js ../../static/js/",
- "build": "npm run build-config | npm run build-js | npm run build-css | npm run build-clipboardjs",
+ "build": "npm-run-all -p build-config build-js build-css build-clipboardjs",
"lint": "eslint src/*.js",
"watch-config": "watchify src/config.js --standalone config -t envify -o ../../static/js/config.js --debug --verbose",
"watch-css": "when-changed ../css/ 'npm run build-css'",
- "watch": "npm run build-clipboardjs | npm run watch-config | npm run watch-css | npm run watch-js"
+ "watch": "npm-run-all -p build-clipboardjs watch-config watch-css watch-js"
},
"browser": {
"select2": "./vendor/select2.full.min.js",
"jquery": "^1.11.3",
"mapbox.js": "^2.2.1",
"moment": "^2.10.3",
+ "npm-run-all": "^4.1.2",
"uglify-js": "^3.2.2",
"uglifyify": "^4.0.5",
"underscore": "^1.8.3"
|
Tweak coveralls configuration
coveralls.io is throwing 422 and breaking CI,
this may or may not help.
Related: | @@ -50,6 +50,6 @@ jobs:
coverage run --omit=*/tests/* --source=hc manage.py test
- name: Coveralls
if: matrix.db == 'postgres' && matrix.python-version == '3.8'
- run: coveralls
+ run: coveralls --service=github
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
|
tooltips: Fix doubling of keyboard-icon.
This fixes a bug where the default fade-in animation of bootstrap tool
tips caused the tool tip over the keyboard-icon to change shape slightly
while fading away.
Fixes | @@ -266,7 +266,9 @@ exports.initialize_kitchen_sink_stuff = function () {
$('.copy_message[data-toggle="tooltip"]').tooltip();
- $('#keyboard-icon').tooltip();
+ // We disable animations here because they can cause the tooltip
+ // to change shape while fading away in weird way.
+ $('#keyboard-icon').tooltip({animation: false});
$("body").on("mouseover", ".message_edit_content", function () {
$(this).closest(".message_row").find(".copy_message").show();
|
Fix passed value in wrong type
BuildResult.logs accepts None or List[str], but in the source_container
plugin, a str is passed. | @@ -124,7 +124,7 @@ class SourceContainerPlugin(BuildStepPlugin):
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True)
except subprocess.CalledProcessError as e:
self.log.error("BSI failed with output:\n%s", e.output)
- return BuildResult(logs=e.output, fail_reason='BSI utility failed build source image')
+ return BuildResult(logs=[e.output], fail_reason='BSI utility failed build source image')
self.log.debug("Build log:\n%s\n", output)
@@ -155,7 +155,7 @@ class SourceContainerPlugin(BuildStepPlugin):
shutil.rmtree(image_output_dir)
return BuildResult(
- logs=output,
+ logs=[output],
source_docker_archive=image_tar_path,
skip_layer_squash=True
)
|
Bulk add people tests: rename "Instructor" -> "instructor" (role)
Lowercased name goes on par with what the actual spelling is.
test_host = Organization.objects.create(domain='example.com',
fullname='Test Organization')
- Role.objects.create(name='Instructor')
+ Role.objects.create(name='instructor')
Role.objects.create(name='learner')
Event.objects.create(start=datetime.date.today(),
host=test_host,
@@ -117,7 +117,7 @@ class CSVBulkUploadTestBase(TestBase):
Sample CSV data
"""
return """personal,family,email,event,role
-John,Doe,[email protected],foobar,Instructor
+John,Doe,[email protected],foobar,instructor
"""
def make_data(self):
@@ -409,11 +409,11 @@ Harry,Potter,[email protected],foobar,Helper
user is silent (ie. no Task nor Person is being created).
"""
foobar = Event.objects.get(slug="foobar")
- instructor = Role.objects.get(name="Instructor")
+ instructor = Role.objects.get(name="instructor")
Task.objects.create(person=self.harry, event=foobar, role=instructor)
csv = """personal,family,email,event,role
-Harry,Potter,[email protected],foobar,Instructor
+Harry,Potter,[email protected],foobar,instructor
"""
data, _ = upload_person_task_csv(StringIO(csv))
|
Add metavar for name parameter in subnet create
Closes-Bug: | @@ -247,6 +247,7 @@ class CreateSubnet(command.ShowOne):
parser = super(CreateSubnet, self).get_parser(prog_name)
parser.add_argument(
'name',
+ metavar='<name>',
help=_("New subnet name")
)
parser.add_argument(
|
[swarming] fix test only regression from
[email protected] | @@ -692,7 +692,7 @@ class TestTaskRunner(TestTaskRunnerBase):
policies = local_caching.CachePolicies(0, 0, 0, 0)
# Inject file 'bar' in the named cache 'foo'.
- with local_caching.NamedCache(cache_dir, policies) as cache:
+ cache = local_caching.NamedCache(cache_dir, policies)
cache.install(dest_dir, 'foo')
with open(os.path.join(dest_dir, 'bar'), 'wb') as f:
f.write('thecache')
@@ -735,7 +735,7 @@ class TestTaskRunner(TestTaskRunnerBase):
print open(os.path.join(cache_dir, 'state.json')).read()
- with local_caching.NamedCache(cache_dir, policies) as cache:
+ cache = local_caching.NamedCache(cache_dir, policies)
self.assertFalse(os.path.exists(dest_dir))
self._expect_files(
[u'c/*/bar', u'c/state.json', u'w/run_isolated_args.json'])
|
output_processors/postgres: Move logging message
Print the debug message warning about writing a large object to the
database before writing the object. | @@ -394,10 +394,10 @@ class PostgresqlResultProcessor(OutputProcessor):
self.current_large_object_uuid = uuid.uuid4()
with open(os.path.join(output_object.basepath, artifact.path)) as lobj_file:
lobj_data = lobj_file.read()
- lo_len = self.current_lobj.write(lobj_data)
- if lo_len > 50000000: # Notify if LO inserts larger than 50MB
+ if len(lobj_data) > 50000000: # Notify if LO inserts larger than 50MB
self.logger.debug(
"Inserting large object of size {}".format(lo_len))
+ lo_len = self.current_lobj.write(lobj_data)
self.cursor.execute(
self.sql_command['create_large_object'],
(
|
Fixing typos
modified: pypeit/core/flexure.py | @@ -288,7 +288,7 @@ def spec_flex_shift(obj_skyspec, arx_skyspec, arx_lines, mxshft=20, excess_shft=
msgs.error(f"Flexure compensation failed for one of your{msgs.newline()}"
f"objects. Either adjust the \"spec_maxshift\"{msgs.newline()}"
f"FlexurePar Keyword, or see the flexure documentation{msgs.newline()}"
- f"for information on how to bypass this error using the{msgs.keyword()}"
+ f"for information on how to bypass this error using the{msgs.newline()}"
f"\"excessive_shift\" keyword.{msgs.newline()}"
"https://pypeit.readthedocs.io/en/release/flexure.html")
|
CoinZoom: Document order not found on order cancels
Finished the TODO in previous commit, there is no error reported by CoinZoom if an order is not found. | @@ -533,7 +533,7 @@ class CoinzoomExchange(ExchangeBase):
except CoinzoomAPIError as e:
err = e.error_payload.get('error', e.error_payload)
self.logger().error(f"Order Cancel API Error: {err}")
- # TODO: Still need to handle order cancel errors.
+ # CoinZoom doesn't report any error if the order wasn't found so we can only handle API failures here.
self._order_not_found_records[order_id] = self._order_not_found_records.get(order_id, 0) + 1
if self._order_not_found_records[order_id] >= self.ORDER_NOT_EXIST_CANCEL_COUNT:
order_was_cancelled = True
|
Render items via a group
So we can "gray out" elements that an element cannot connect to.
def paint_item(self, item, cairo):
selection = self.selection
diagram = item.diagram
- cairo.save()
+ style = diagram.style(StyledItem(item, selection))
+ cairo.push_group()
try:
+ cairo.set_source_rgba(*style["color"])
cairo.transform(item.matrix_i2c.to_cairo())
selection = self.selection
@@ -36,7 +38,7 @@ class ItemPainter:
item.draw(
DrawContext(
cairo=cairo,
- style=diagram.style(StyledItem(item, selection)),
+ style=style,
selected=(item in selection.selected_items),
focused=(item is selection.focused_item),
hovered=(item is selection.hovered_item),
@@ -45,7 +47,8 @@ class ItemPainter:
)
finally:
- cairo.restore()
+ cairo.pop_group_to_source()
+ cairo.paint_with_alpha(1.0)
def paint(self, items, cairo):
"""Draw the items."""
|
Add dashboard search placeholder to describe search patterns
Since ^ and $ for searching from start and to end are supported, let's
tell the user about it. | $(id).DataTable({
"paging" : true,
"lengthChange" : true,
+ language: {
+ searchPlaceholder: "Use ^ and $ for start and end",
+ },
"searching" : true,
"ordering" : true,
"columnDefs": [
|
Update test_util_functions.py
Fixed the same error in two more spots. | @@ -521,12 +521,12 @@ def test_process_cache():
process_cache(cache, kernel_options, tuning_options, runner)
assert "kernel" in str(excep.value)
- with pytest.raises(ValueError) as excp:
+ with pytest.raises(ValueError) as excep:
runner.dev.name = "wrong_device"
process_cache(cache, kernel_options, tuning_options, runner)
assert "device" in str(excep.value)
- with pytest.raises(ValueError) as excp:
+ with pytest.raises(ValueError) as excep:
tuning_options.tune_params["y"] = ["a", "b"]
process_cache(cache, kernel_options, tuning_options, runner)
assert "parameter" in str(excep.value)
|
Step Size Crash Correction
Low level fix. | @@ -26,7 +26,7 @@ class LaserSettings:
self.acceleration_custom = False
self.acceleration = 1
- self.raster_step = 0
+ self.raster_step = 1
self.raster_direction = 0
self.raster_swing = False # False = bidirectional, True = Unidirectional
self.raster_preference_top = 0
@@ -258,6 +258,8 @@ class RasterCut(CutObject):
def generator(self):
step = self.settings.raster_step
+ if step < 0:
+ step = 1
direction = self.settings.raster_direction
traverse = 0
if direction == 0:
|
[setup] Synchronize pypi package 1.2 with 2.1
Parsers for requirements are different in Python 2 and Python 3.
Enforce the same result in these 3 conditions. | @@ -36,7 +36,7 @@ if not python_is_supported():
# ------- setup extra_requires ------- #
extra_deps = {
# Core library dependencies
- 'eventstreams': ['sseclient>=0.0.18,!=0.0.23,!=0.0.24'],
+ 'eventstreams': ['sseclient!=0.0.23,!=0.0.24,>=0.0.18'],
'isbn': ['python-stdnum'],
'Graphviz': ['pydot>=1.2'],
'Google': ['google>=1.7'],
@@ -51,7 +51,7 @@ extra_deps = {
';python_full_version=="2.7.7" or python_full_version=="2.7.8"',
'pycparser!=2.14',
],
- 'mwoauth': ['mwoauth>=0.2.4,!=0.3.1'],
+ 'mwoauth': ['mwoauth!=0.3.1,>=0.2.4'],
'html': ['BeautifulSoup4'],
'http': ['fake_useragent'],
'flake8': [ # Due to incompatibilities between packages the order matters.
@@ -86,7 +86,7 @@ script_deps = {
'Pillow;python_version>="3.5"',
],
'states_redirect.py': ['pycountry'],
- 'weblinkchecker.py': ['memento_client>=0.5.1,!=0.6.0'],
+ 'weblinkchecker.py': ['memento_client!=0.6.0,>=0.5.1'],
'patrol.py': ['mwparserfromhell>=0.3.3'],
}
script_deps['data_ingestion.py'] = extra_deps['csv']
|
Updated tests due to changes from 4.3 -> 4.4
RequestHandler._headers[<header_name>] returns a byte string in 4.3
but a string in 4.4. This affects tests where we had to do byte-str
conversion before comparison, which is no longer needed now. | @@ -29,7 +29,7 @@ class TestProviderHandler:
await handler.prepare()
# check that X-WATERBUTLER-REQUEST-ID is valid UUID
- assert UUID(handler._headers['X-WATERBUTLER-REQUEST-ID'].decode('utf-8'), version=4)
+ assert UUID(handler._headers['X-WATERBUTLER-REQUEST-ID'], version=4)
@pytest.mark.asyncio
async def test_prepare_put(self, handler, patch_auth_handler, patch_make_provider_core,
@@ -43,7 +43,7 @@ class TestProviderHandler:
assert handler.path == WaterButlerPath('/file', prepend=None)
# check that X-WATERBUTLER-REQUEST-ID is valid UUID
- assert UUID(handler._headers['X-WATERBUTLER-REQUEST-ID'].decode('utf-8'), version=4)
+ assert UUID(handler._headers['X-WATERBUTLER-REQUEST-ID'], version=4)
@pytest.mark.asyncio
async def test_prepare_stream(self, handler):
|
Update transform_segmentation_label.py
fix segmentation label transform localization label bug. | @@ -26,9 +26,8 @@ def generate_mapping_list_txt(action_dict, out_path):
f.close()
-def segmentation_convert_localization_label(prefix_data_path, out_path,
- action_dict, fps):
- label_path = os.path.join(prefix_data_path, "train")
+def segmentation_convert_localization_label(prefix_data_path, out_path, action_dict, fps):
+ label_path = os.path.join(prefix_data_path, "groundTruth")
label_txt_name_list = os.listdir(label_path)
labels_dict = {}
@@ -37,7 +36,7 @@ def segmentation_convert_localization_label(prefix_data_path, out_path,
for label_name in tqdm(label_txt_name_list, desc='label convert:'):
label_dict = {}
label_dict["url"] = label_name.split(".")[0] + ".mp4"
- label_txt_path = os.path.join(prefix_data_path, "train", label_name)
+ label_txt_path = os.path.join(prefix_data_path, "groundTruth", label_name)
with open(label_txt_path, "r", encoding='utf-8') as f:
gt = f.read().split("\n")[:-1]
@@ -48,14 +47,14 @@ def segmentation_convert_localization_label(prefix_data_path, out_path,
for index in range(1, len(gt)):
if before_action_name != gt[index]:
boundary_index_list.append(index)
+ before_action_name = gt[index]
actions_list = []
for index in range(len(boundary_index_list) - 1):
- if gt[index] != "None":
+ if gt[boundary_index_list[index]] != "None":
action_name = gt[boundary_index_list[index]]
start_sec = float(boundary_index_list[index]) / float(fps)
- end_sec = float(boundary_index_list[index + 1]) / float(fps)
- action_id = list(action_dict.keys())[list(
- action_dict.values()).index(action_name)]
+ end_sec = float(boundary_index_list[index + 1] - 1) / float(fps)
+ action_id = action_dict[action_name]
label_action_dict = {}
label_action_dict["label_names"] = action_name
label_action_dict["start_id"] = start_sec
@@ -157,7 +156,7 @@ def main():
args.out_path)
elif args.mode == "localization":
- action_dict = load_action_dict(args.data_path)
+ action_dict = load_action_dict(args.label_path)
segmentation_convert_localization_label(args.data_path,
args.out_path,
action_dict,
|
Simplify s3.get_object_info wrapper code
SIM:
cr | @@ -42,24 +42,31 @@ def upload_file(bucket, key, file_path):
Body=fp)
+def __raise_if_bucket_is_empty(result):
+ if not result.get('Contents'):
+ raise NotFoundError('Object not found.')
+
+
def get_object_info(bucket, object_key):
- result = _make_api_call('list_objects',
+ result = _make_api_call(
+ 'list_objects',
Bucket=bucket,
- Prefix=object_key)
+ Prefix=object_key
+ )
- if 'Contents' not in result or len(result['Contents']) < 1:
- raise NotFoundError('Object not found.')
+ __raise_if_bucket_is_empty(result)
objects = result['Contents']
if len(objects) == 1:
return objects[0]
else:
# There is more than one result, search for correct one
- object_key = next((o for o in objects if o['Key'] == object_key), None)
- if object_key is None:
+ s3_object = next((s3_object for s3_object in objects if s3_object['Key'] == object_key), None)
+
+ if not s3_object:
raise NotFoundError('Object not found.')
- else:
- return object_key
+
+ return s3_object
def get_object(bucket, key):
|
Use '_' prefix instead of disabling pylint unused-argument lint
It is more precise to mark the unused parameters this way. | @@ -28,18 +28,16 @@ class StratisActions():
"""
@staticmethod
- def list_stratisd_redundancy(namespace):
+ def list_stratisd_redundancy(_namespace):
"""
List the stratisd redundancy designations.
"""
- # pylint: disable=unused-argument
for code in RedundancyCodes:
print("%s: %d" % (code.name, code.value))
@staticmethod
- def list_stratisd_version(namespace):
+ def list_stratisd_version(_namespace):
"""
List the stratisd version.
"""
- # pylint: disable=unused-argument
print("%s" % Manager.Properties.Version.Get(get_object(TOP_OBJECT)))
|
Allow visit_type to be empty
OpenMRS Encounters don't need to be associated with a Visit | @@ -278,6 +278,7 @@ class CreateVisitTask(WorkflowTask):
def run(self):
subtasks = []
start_datetime = to_timestamp(self.visit_datetime)
+ if self.visit_type:
stop_datetime = to_timestamp(
self.visit_datetime + timedelta(days=1) - timedelta(seconds=1)
)
@@ -327,8 +328,9 @@ class CreateEncounterTask(WorkflowTask):
'patient': self.person_uuid,
'form': self.openmrs_form,
'encounterType': self.encounter_type,
- 'visit': self.visit_uuid,
}
+ if self.visit_uuid:
+ encounter['visit'] = self.visit_uuid
if self.location_uuid:
encounter['location'] = self.location_uuid
if self.provider_uuid:
|
Share the same entity_info struct subclass for all AST node subclasses
TN: | @@ -1240,6 +1240,13 @@ class StructMetaclass(CompiledTypeMetaclass):
:type: Struct
"""
+ entity_info = None
+ """
+ Struct subclass to contain all entity information, except the node itself.
+
+ :type: Struct
+ """
+
def __new__(mcs, name, bases, dct):
# The two following booleans are mutually exclusive and at least one
# will be True.
@@ -2300,21 +2307,22 @@ class ASTNode(Struct):
)
@classmethod
- @memoized
def entity_info(cls):
"""
Return the entity info type, which is a record that contains semantic
information which, when added to an AST node, makes an entity.
"""
- return type(
- b'EntityInfo',
- (Struct, ), {
+ # This is manual memoization. It is necessary because memoization does
+ # not play well with class method when we want the memoization to be
+ # common to the whole class hierarchy.
+ if not StructMetaclass.entity_info:
+ StructMetaclass.entity_info = type(b'EntityInfo', (Struct, ), {
'MD': BuiltinField(
T.env_md, doc='The metadata associated to the AST node'
),
'parents_bindings': BuiltinField(EnvRebindingsType, doc=""),
- }
- )
+ })
+ return StructMetaclass.entity_info
@classmethod
@memoized
|
ColorSwatch : Improve drawing for non-zero based origins
Extract paint implementation to a separate method so it can be used by other widgets.
Don't snap starting point to multiples of `checkSize`.
Don't draw outside the bounds of `rect`. | @@ -166,41 +166,67 @@ class _Checker( QtWidgets.QWidget ) :
def paintEvent( self, event ) :
- painter = QtGui.QPainter( self )
- rect = event.rect()
-
- if self.color0 != self.color1 :
+ _Checker._paintRectangle(
+ QtGui.QPainter( self ),
+ event.rect(),
+ self.color0,
+ self.color1,
+ self.borderColor,
+ self.__borderTop,
+ self.__borderBottom,
+ self.width(),
+ self.height()
+ )
+
+ @staticmethod
+ def _paintRectangle(
+ painter,
+ rect,
+ color0,
+ color1,
+ borderColor = None,
+ borderTop = False,
+ borderBottom = False,
+ borderWidth = 0,
+ borderHeight = 0
+ ) :
+
+ if color0 != color1 :
# draw checkerboard if colours differ
checkSize = 6
- min = imath.V2i( rect.x() / checkSize, rect.y() / checkSize )
- max = imath.V2i( 1 + (rect.x() + rect.width()) / checkSize, 1 + (rect.y() + rect.height()) / checkSize )
-
- for x in range( min.x, max.x ) :
- for y in range( min.y, max.y ) :
- if ( x + y ) % 2 :
- painter.fillRect( QtCore.QRectF( x * checkSize, y * checkSize, checkSize, checkSize ), self.color0 )
+ gridSize = imath.V2i( rect.width() / checkSize + 1, rect.height() / checkSize + 1 )
+
+ for i in range( 0, gridSize.x ) :
+ for j in range( 0, gridSize.y ) :
+ offset = imath.V2i( i * checkSize, j * checkSize )
+ square = QtCore.QRectF(
+ rect.x() + offset.x,
+ rect.y() + offset.y,
+ min( rect.width() - offset.x, checkSize ),
+ min( rect.height() - offset.y, checkSize )
+ )
+ if ( i + j ) % 2 :
+ painter.fillRect( square, color0 )
else :
- painter.fillRect( QtCore.QRectF( x * checkSize, y * checkSize, checkSize, checkSize ), self.color1 )
+ painter.fillRect( square, color1 )
else :
# otherwise just draw a flat colour cos it'll be quicker
- painter.fillRect( QtCore.QRectF( rect.x(), rect.y(), rect.x() + rect.width(), rect.y() + rect.height() ), self.color0 )
+ painter.fillRect( QtCore.QRectF( rect.x(), rect.y(), rect.width(), rect.height() ), color0 )
- if self.borderColor is not None :
- w = self.width()
- h = self.height()
- pen = QtGui.QPen( self.borderColor )
+ if borderColor is not None :
+ pen = QtGui.QPen( borderColor )
lines = [
- QtCore.QLine( 0, 0, 0, h ),
- QtCore.QLine( w, 0, w, h ),
+ QtCore.QLine( 0, 0, 0, borderHeight ),
+ QtCore.QLine( borderWidth, 0, borderWidth, borderHeight ),
]
- if self.__borderTop :
- lines.append( QtCore.QLine( 0, 0, w, 0 ) )
- if self.__borderBottom :
- lines.append( QtCore.QLine( 0, h, w, h ) )
+ if borderTop :
+ lines.append( QtCore.QLine( 0, 0, borderWidth, 0 ) )
+ if borderBottom :
+ lines.append( QtCore.QLine( 0, borderHeight, borderWidth, borderHeight ) )
pen.setWidth( 4 )
painter.setPen( pen )
painter.drawLines( lines )
|
configure: expand discovered vdirs and set default
Before we would show the to-be-discovered folders which are not valid
vdirs themselves. This would lead to configure creating a broken config
file (default_calendar set wrongly).
fix | @@ -34,7 +34,7 @@ import xdg
from click import Choice, UsageError, confirm, prompt
from .exceptions import FatalError
-from .settings import find_configuration_file
+from .settings import find_configuration_file, utils
logger = logging.getLogger('khal')
@@ -123,7 +123,14 @@ def choose_time_format():
def choose_default_calendar(vdirs):
- names = [name for name, _, _ in sorted(vdirs or ())]
+ names = []
+ for name, path, vtype in sorted(vdirs or ()):
+ if vtype != 'discover':
+ names.append(name)
+ else:
+ for vpath in utils.get_all_vdirs(utils.expand_path(path)):
+ names.append(utils.get_unique_name(vpath, names))
+
print("Which calendar do you want as a default calendar?")
print("(The default calendar is used when no calendar is specified.)")
print(f"Configured calendars: {', '.join(names)}")
|
Refactor --pw
The password prompt is handled outside of the add_argument(); the type=callable wasn't working as expected | @@ -133,7 +133,7 @@ def write_to_live_repo():
-def get_password(prompt='Password: ', confirm=True):
+def get_password(prompt='Password: ', confirm=False):
"""
Return the password entered by the user. If 'confirm' is True, the user is
asked to enter the previously entered password once again. If they match,
@@ -174,13 +174,19 @@ def add_targets(parsed_arguments):
repository.targets.add_target(
os.path.join(repo_targets_path, os.path.basename(target_path)))
+ # Examples of how the --pw command-line option is interpreted:
+ # repo.py --init': parsed_arguments.pw = 'pw'
+ # repo.py --init --pw my_pw: parsed_arguments.pw = 'my_pw'
+ # repo.py --init --pw: The user is prompted for a password, here.
+ if not parsed_arguments.pw:
+ parsed_arguments.pw = get_password(prompt='Enter a password for the'
+ ' top-level role keys: ', confirm=True)
+
+ # Load the top-level, non-root, keys to make a new release.
targets_private = repo_tool.import_ecdsa_privatekey_from_file(
os.path.join(DEFAULT_KEYSTORE, DEFAULT_TARGETS_KEY), parsed_arguments.pw)
-
- # Make a new release.
snapshot_private = repo_tool.import_ecdsa_privatekey_from_file(
os.path.join(DEFAULT_KEYSTORE, DEFAULT_SNAPSHOT_KEY), parsed_arguments.pw)
-
timestamp_private = repo_tool.import_ecdsa_privatekey_from_file(
os.path.join(DEFAULT_KEYSTORE, DEFAULT_TIMESTAMP_KEY), parsed_arguments.pw)
@@ -228,6 +234,14 @@ def set_top_level_keys(repository):
Generate, write, and set the top-level keys. 'repository' is modified.
"""
+ # Examples of how the --pw command-line option is interpreted:
+ # repo.py --init': parsed_arguments.pw = 'pw'
+ # repo.py --init --pw my_pw: parsed_arguments.pw = 'my_pw'
+ # repo.py --init --pw: The user is prompted for a password, here.
+ if not parsed_arguments.pw:
+ parsed_arguments.pw = get_password(prompt='Enter a password for the'
+ ' top-level role keys: ', confirm=True)
+
repo_tool.generate_and_write_ecdsa_keypair(
os.path.join(DEFAULT_KEYSTORE, DEFAULT_ROOT_KEY), password=parsed_arguments.pw)
repo_tool.generate_and_write_ecdsa_keypair(
@@ -336,9 +350,8 @@ def parse_arguments():
parser.add_argument('--role', nargs='?', type=str, const='targets',
default='targets', help='Specify a role.')
- parser.add_argument('--pw', nargs='?', type=get_password, const='pw',
- help='Specify a password for the default, top-level key'
- ' files.')
+ parser.add_argument('--pw', nargs='?', default='pw',
+ help='Specify a password for the top-level key files.')
parsed_args = parser.parse_args()
|
Always look up the session name for lsp_execute
Resolves | @@ -27,7 +27,7 @@ class LspExecuteCommand(LspTextCommand):
listener.do_signature_help_async(manual=False)
return sublime.set_timeout_async(run_async)
- session = self.session_by_name(session_name) if session_name else self.best_session(self.capability)
+ session = self.session_by_name(session_name if session_name else self.session_name)
if session and command_name:
if command_args:
self._expand_variables(command_args)
|