HelpChannels: clear roles when resetting permissions Claimants will have a special role that needs to be removed rather than using member overwrites for the category.
@@ -632,18 +632,16 @@ class HelpChannels(Scheduler, commands.Cog): await category.set_permissions(member, **permissions) async def reset_send_permissions(self) -> None: - """Reset send permissions for members with it set to False in the Available category.""" + """Reset send permissions in the Available category for claimants.""" log.trace("Resetting send permissions in the Available category.") + guild = self.bot.get_guild(constants.Guild.id) - for member, overwrite in self.available_category.overwrites.items(): - if isinstance(member, discord.Member) and overwrite.send_messages is False: + # TODO: replace with a persistent cache cause checking every member is quite slow + for member in guild.members: + if self.is_claimant(member): log.trace(f"Resetting send permissions for {member} ({member.id}).") - - # We don't use the permissions helper function here as we may have to reset multiple overwrites - # and we don't want to enforce the permissions synchronization in each iteration. - await self.available_category.set_permissions(member, overwrite=None) - - log.trace(f"Ensuring channels in `Help: Available` are synchronized after permissions reset.") + role = discord.Object(constants.Roles.help_cooldown) + await member.remove_roles(role) async def reset_claimant_send_permission(self, channel: discord.TextChannel) -> None: """Reset send permissions in the Available category for the help `channel` claimant."""
Add test_create_router_set_gateway_with_fixed_ip Sometimes we need to add a fixed_ip to the router's gateway.
@@ -239,6 +239,36 @@ class RoutersTest(base.BaseRouterTest): 'enable_snat': False}) self._verify_gateway_port(router['id']) + @decorators.idempotent_id('cbe42f84-04c2-11e7-8adb-fa163e4fa634') + @test.requires_ext(extension='ext-gw-mode', service='network') + @testtools.skipUnless(CONF.network.public_network_id, + 'The public_network_id option must be specified.') + def test_create_router_set_gateway_with_fixed_ip(self): + # Don't know public_network_address, so at first create address + # from public_network and delete + port = self.admin_ports_client.create_port( + network_id=CONF.network.public_network_id)['port'] + self.admin_ports_client.delete_port(port_id=port['id']) + + fixed_ip = { + 'subnet_id': port['fixed_ips'][0]['subnet_id'], + 'ip_address': port['fixed_ips'][0]['ip_address'] + } + external_gateway_info = { + 'network_id': CONF.network.public_network_id, + 'external_fixed_ips': [fixed_ip] + } + + # Create a router and set gateway to fixed_ip + router = self.admin_routers_client.create_router( + external_gateway_info=external_gateway_info)['router'] + self.addCleanup(self.admin_routers_client.delete_router, + router_id=router['id']) + # Examine router's gateway is equal to fixed_ip + self.assertEqual(router['external_gateway_info'][ + 'external_fixed_ips'][0]['ip_address'], + fixed_ip['ip_address']) + @decorators.idempotent_id('ad81b7ee-4f81-407b-a19c-17e623f763e8') @testtools.skipUnless(CONF.network.public_network_id, 'The public_network_id option must be specified.')
Update common/chromium/dcheck.patch To adapt to minor changes in content/browser/frame_host/navigation_controller_impl.cc
@@ -62,11 +62,11 @@ index 70f1ff97b1ac..d1abd804e988 100644 } diff --git a/content/browser/frame_host/navigation_controller_impl.cc b/content/browser/frame_host/navigation_controller_impl.cc -index bebde463db6a..46dbb416ce11 100644 +index ff1d3fcb6eba..ad6578f645d4 100644 --- a/content/browser/frame_host/navigation_controller_impl.cc +++ b/content/browser/frame_host/navigation_controller_impl.cc -@@ -1015,8 +1015,10 @@ NavigationType NavigationControllerImpl::ClassifyNavigation( - return NAVIGATION_TYPE_NEW_PAGE; +@@ -1079,8 +1079,10 @@ NavigationType NavigationControllerImpl::ClassifyNavigation( + return NAVIGATION_TYPE_NEW_SUBFRAME; } - // We only clear the session history when navigating to a new page. @@ -78,7 +78,7 @@ index bebde463db6a..46dbb416ce11 100644 if (rfh->GetParent()) { // All manual subframes would be did_create_new_entry and handled above, so -@@ -1234,7 +1236,10 @@ void NavigationControllerImpl::RendererDidNavigateToNewPage( +@@ -1301,7 +1303,10 @@ void NavigationControllerImpl::RendererDidNavigateToNewPage( new_entry->GetFavicon() = GetLastCommittedEntry()->GetFavicon(); }
Use quantity_support in obs planning example. This is to avoid the override of concatenate causing problems with plt.scatter.
@@ -33,13 +33,14 @@ package. # you're free at 11:00 pm local time, and you want to know if it will be up. # Astropy can answer that. # -# Make print work the same in all versions of Python, set up numpy, -# matplotlib, and use a nicer set of plot parameters: +# Import numpy and matplotlib. For the latter, use a nicer set of plot +# parameters and set up support for plotting/converting quantities. import numpy as np import matplotlib.pyplot as plt -from astropy.visualization import astropy_mpl_style +from astropy.visualization import astropy_mpl_style, quantity_support plt.style.use(astropy_mpl_style) +quantity_support() ############################################################################## @@ -140,15 +141,15 @@ plt.plot(delta_midnight, moonaltazs_July12_to_13.alt, color=[0.75]*3, ls='--', l plt.scatter(delta_midnight, m33altazs_July12_to_13.alt, c=m33altazs_July12_to_13.az, label='M33', lw=0, s=8, cmap='viridis') -plt.fill_between(delta_midnight.to('hr').value, 0, 90, +plt.fill_between(delta_midnight, 0*u.deg, 90*u.deg, sunaltazs_July12_to_13.alt < -0*u.deg, color='0.5', zorder=0) -plt.fill_between(delta_midnight.to('hr').value, 0, 90, +plt.fill_between(delta_midnight, 0*u.deg, 90*u.deg, sunaltazs_July12_to_13.alt < -18*u.deg, color='k', zorder=0) plt.colorbar().set_label('Azimuth [deg]') plt.legend(loc='upper left') -plt.xlim(-12, 12) -plt.xticks(np.arange(13)*2 -12) -plt.ylim(0, 90) +plt.xlim(-12*u.hour, 12*u.hour) +plt.xticks((np.arange(13)*2-12)*u.hour) +plt.ylim(0*u.deg, 90*u.deg) plt.xlabel('Hours from EDT Midnight') plt.ylabel('Altitude [deg]') plt.show()
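For context, a minimal standalone sketch of what `quantity_support` enables — passing astropy `Quantity` objects straight to matplotlib without manual `.to(...).value` conversions. The data values here are made up for illustration:

```python
import numpy as np
import astropy.units as u
import matplotlib.pyplot as plt
from astropy.visualization import quantity_support

quantity_support()  # teach matplotlib how to plot/convert Quantity objects

delta_midnight = np.linspace(-12, 12, 100) * u.hour
altitude = (45 + 30 * np.cos(delta_midnight.value / 12 * np.pi)) * u.deg

plt.plot(delta_midnight, altitude)   # Quantities accepted directly
plt.xlim(-12 * u.hour, 12 * u.hour)  # axis limits can be Quantities too
plt.ylim(0 * u.deg, 90 * u.deg)
plt.show()
```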
Allow the user created by the Azure template to use Docker. Previously you would have to use sudo to run Docker commands. Should close
@@ -200,6 +200,8 @@ then echo "WARNING: Docker could not be installed! Continuing anyway!" fi +# Authorize the normal user to use Docker +sudo usermod -aG docker $AZUREUSER ############ # setup OMS
[nixrawio] Cleanup and comments Added a comment describing the purpose of the global 'neo_attributes' dictionary. Uncommented some lines that were disabled when investigating a bug. Removed unused import 'warnings'.
@@ -12,7 +12,6 @@ from .baserawio import (BaseRawIO, _signal_channel_dtype, _unit_channel_dtype, _event_channel_dtype) from ..io.nixio import NixIO import numpy as np -import warnings try: import nixio as nix @@ -22,6 +21,9 @@ except ImportError: nix = None +# When reading metadata properties, the following keys are ignored since they +# are used to store Neo object properties. +# This dictionary is used in the _filter_properties() method. neo_attributes = { "segment": ["index"], "analogsignal": ["units", "copy", "sampling_rate", "t_start"], @@ -191,10 +193,10 @@ class NIXRawIO(BaseRawIO): if da.type == 'neo.analogsignal' and seg_ann['signals']: # collect and group DataArrays sig_ann = seg_ann['signals'][sig_idx] - # sig_chan_ann = self.raw_annotations['signal_channels'][sig_idx] + sig_chan_ann = self.raw_annotations['signal_channels'][sig_idx] props = da.metadata.inherited_properties() sig_ann.update(self._filter_properties(props, 'analogsignal')) - # sig_chan_ann.update(self._filter_properties(props, 'analogsignal')) + sig_chan_ann.update(self._filter_properties(props, 'analogsignal')) sig_idx += 1 sp_idx = 0 ev_idx = 0
Notify user to rerun bootstrap if any repos are installed in develop mode. Check/install repos immediately after arg parsing. This provides a better user experience: we prioritize the environment check, and it does not give the user the false impression that Spyder failed to start.
time_start = time.time() -logger.info("Executing Spyder from source checkout") - # ---- Parse command line parser = argparse.ArgumentParser( # Prepare arguments for Spyder's main script sys.argv = [sys.argv[0]] + args.spyder_options +# ---- Install sub repos + +installed_dev_repo = False +if not args.no_install: + for name in REPOS.keys(): + if not REPOS[name]['editable']: + install_repo(name) + installed_dev_repo = True + else: + logger.info("%s installed in editable mode", name) +if installed_dev_repo: + print("\n" + "*" * 79 + "\n" + + "\tOne or more repos were installed in develop mode.\n" + + "\tPlease rerun bootstrap.\n" + + "*" * 79 + "\n") + sys.exit() + +logger.info("Executing Spyder from source checkout") + # ---- Update os.environ # Store variable to be used in self.restart (restart spyder instance) logger.info("Skipping GUI toolkit detection") os.environ['QT_API'] = args.gui -# ---- Install sub repos - -if not args.no_install: - for name in REPOS.keys(): - if not REPOS[name]['editable']: - install_repo(name) - else: - logger.info("%s already installed in editable mode", name) - # ---- Check versions # Checking versions (among other things, this has the effect of setting the
Update tasks when filter on server is toggled When filter on server is enabled, we should immediately filter results for the current query on the server. When it's disabled, we need to reload the unfiltered results from the server.
@@ -478,6 +478,7 @@ function visualiserApp(luigi) { $('#serverSideCheckbox').click(function(e) { e.preventDefault(); changeState('filterOnServer', this.checked ? '1' : null); + updateTasks(); }); $("#invertCheckbox").click(function(e) {
[PR workaroud "oem" coding not on Py3.5 Use a Windows API to fetch the console output code page if Python < 3.6, where the encoding name "oem" is not yet defined (this was the original proposal in issue
@@ -249,14 +249,21 @@ def get_output(vcbat, args=None, env=None): # Ongoing problems getting non-corrupted text led to this # changing to "oem" from "mbcs" - the scripts run presumably # attached to a console, so some particular rules apply. + # Unfortunately, "oem" not defined in Python 3.5, so get another way + if sys.version_info.major == 3 and sys.version_info.minor < 6: + from ctypes import windll + + OEM = "cp{}".format(windll.kernel32.GetConsoleOutputCP()) + else: + OEM = "oem" if stderr: # TODO: find something better to do with stderr; # this at least prevents errors from getting swallowed. - sys.stderr.write(stderr.decode("oem")) + sys.stderr.write(stderr.decode(OEM)) if popen.wait() != 0: - raise IOError(stderr.decode("oem")) + raise IOError(stderr.decode(OEM)) - return stdout.decode("oem") + return stdout.decode(OEM) KEEPLIST = (
Concession to avoid UAT test failure Still getting the occasional out-of-order output from this script. A better approach is to address the stream sync issues in the tests - but taking this approach for expediency.
from __future__ import print_function -import sys - file = "hello.txt" -print("Reading message from %s" % file, file=sys.stderr) +print("Reading message from %s" % file) print(open(file).read())
Fix title case Change capitalization to title case for consistency.
@@ -627,7 +627,7 @@ Attribute GET Form fields POST Form fields File Uploa ============================== =============== ================ ============ -File uploads +File Uploads ------------ To support file uploads, we have to change the ``<form>`` tag a bit. First, we tell the browser to encode the form data in a different way by adding an ``enctype="multipart/form-data"`` attribute to the ``<form>`` tag. Then, we add ``<input type="file" />`` tags to allow the user to select a file. Here is an example: @@ -665,7 +665,7 @@ JSON Content Some JavaScript or REST clients send ``application/json`` content to the server. The :attr:`BaseRequest.json` attribute contains the parsed data structure, if available. -The raw request body +The Raw Request Body -------------------- You can access the raw body data as a file-like object via :attr:`BaseRequest.body`. This is a :class:`BytesIO` buffer or a temporary file depending on the content length and :attr:`BaseRequest.MEMFILE_MAX` setting. In both cases the body is completely buffered before you can access the attribute. If you expect huge amounts of data and want to get direct unbuffered access to the stream, have a look at ``request['wsgi.input']``.
Update Exam validator # Conflicts: # kolibri/core/exams/serializers.py
@@ -105,12 +105,17 @@ class ExamSerializer(serializers.ModelSerializer): def validate_question_sources(self, value): for question in value: - if 'exercise_id' not in question: - raise serializers.ValidationError("Question missing 'exercise_id'") - if 'question_id' not in question: - raise serializers.ValidationError("Question missing 'question_id'") - if 'title' not in question: - raise serializers.ValidationError("Question missing 'title'") + required_fields = [ + "exercise_id", + "question_id", + "title", + "counterInExercise", + ] + for field in required_fields: + if field not in question: + raise serializers.ValidationError( + "Question missing '{}'".format(field) + ) return value def to_internal_value(self, data):
rendered_markdown: Rework CSS for markdown headings. Extracted by tabbott from the original pull request, with additional changes to document the surprising margin-top in our current implementation and avoid a bit of unnecessary CSS.
border-top: 1px solid hsl(0, 0%, 87%); } - /* Headings: We need to make sure our headings are less prominent than our sender names styling. */ + /* Headings */ h1, h2, h3, h4, h5, h6 { - font-size: 14px; font-weight: 600; line-height: 1.4; + /* No margin-top is important to make messages that start with a heading + avoid a weird blank area at the top of a message. */ margin-top: 0; margin-bottom: 5px; + } + + /* We use a modest progression of heading sizes to make them stand out + from normal next but avoid taking up too much space. */ + h1 { + font-size: 1.4em; + } + + h2 { + font-size: 1.3em; + } + + h3 { + font-size: 1.2em; + } + + h4 { + font-size: 1.1em; + } + + h5 { + font-size: 1em; + text-decoration: underline; + } + + h6 { + font-size: 1em; + font-weight: normal; text-decoration: underline; }
ubuiltins: Fix broken classmethod decorator. Since classmethod is being redefined here, it broke Sphinx. Fixes
@@ -45,6 +45,7 @@ _bool = bool _bytearray = bytearray _bytes = bytes _callable = callable +_classmethod = classmethod _complex = complex _dict = dict _float = float @@ -585,7 +586,7 @@ class int: Byte sequence that represents the integer. """ - # @classmethod + @_classmethod def from_bytes(cls, _bytes: _bytes, byteorder: Literal["little", "big"]) -> _int: """from_bytes(bytes, byteorder) -> int
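A minimal, hypothetical sketch of the alias-before-shadowing pattern this fix relies on: once a module defines its own `classmethod` stub, the real builtin must be captured under another name first so actual decorators keep working (and Sphinx can still introspect them).

```python
_classmethod = classmethod  # capture the real builtin before it is shadowed


class classmethod:  # documentation stub that shadows the builtin name
    """Stub used only so the docs tool can render it."""


class int:  # another stub; decorating with the bare name would now break
    @_classmethod
    def from_bytes(cls, data, byteorder):
        """Stub of int.from_bytes for documentation purposes."""
```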
Go back to random browsing when we clear category input. Just clearing the input is a bit more implicit than I'd like, so it might make sense to add a "clear" button later, but it seems those are out of fashion looking at a few other examples. Fixes
@@ -224,6 +224,9 @@ function CategoryFilter() { }) cin.addEventListener("awesomplete-close", function() { + if (cin.value === '') { + setHiddenCategoryAndNextId(this.form, cin.value); + } this.classList.remove("open"); })
secscan: make batch_size configurable For larger databases like quay.io the default batch size is too high to complete the job in a reasonable time. Make the batch_size configurable so we can adjust how many scans can be in-flight for a worker.
@@ -42,10 +42,8 @@ from data.database import ( db_transaction, ) - logger = logging.getLogger(__name__) - IndexReportState = namedtuple("IndexReportState", ["Index_Finished", "Index_Error"])( "IndexFinished", "IndexError" ) @@ -198,7 +196,9 @@ class V4SecurityScanner(SecurityScannerInterface): ) # 4^log10(total) gives us a scalable batch size into the billions. - batch_size = int(4 ** log10(max(10, max_id - min_id))) + batch_size = self.app.config.get( + "SECURITY_SCANNER_V4_BATCH_SIZE", int(4 ** log10(max(10, max_id - min_id))) + ) # TODO(alecmerdler): We want to index newer manifests first, while backfilling older manifests... iterator = itertools.chain(
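To make the scaling comment concrete, here is a small sketch (values illustrative, config key as in the diff) of how the `4 ** log10(...)` default grows with the id range, and how a configured value would take precedence:

```python
from math import log10

def default_batch_size(min_id, max_id):
    # 4 ** log10(n) == n ** log10(4) ≈ n ** 0.6, so the batch size grows
    # sub-linearly: bigger tables get bigger batches, but not linearly bigger.
    return int(4 ** log10(max(10, max_id - min_id)))

for span in (10, 10_000, 1_000_000, 1_000_000_000):
    print(f"id span {span:>13,} -> batch size {default_batch_size(0, span):,}")
# id span 10 -> 4, 10,000 -> 256, 1,000,000 -> 4,096, 1,000,000,000 -> 262,144

config = {"SECURITY_SCANNER_V4_BATCH_SIZE": 500}  # hypothetical override
batch_size = config.get("SECURITY_SCANNER_V4_BATCH_SIZE",
                        default_batch_size(0, 1_000_000_000))
print(batch_size)  # 500
```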
fix get_mask_creator_default error message block -> block4
@@ -567,5 +567,5 @@ def get_mask_creator_default(mask_type: Union[str, List[int]]) -> PruningMaskCre else: raise ValueError( f"Unknown mask_type {mask_type}. Supported mask types include " - "'unstructured' and 'block'" + "'unstructured' and 'block4'" )
MAINT: fixed consistency of state order in Alphabet.get_word_alphabet [CHANGED] the previous implementation produced a strange state order, so a k-mer index inferred by expanding base indices into a 1D index did not match those produced by this method. Code is simplified, uses itertools.product and addresses the issue.
@@ -547,14 +547,9 @@ class Alphabet(Enumeration): Note that the result is not a JointEnumeration object, and cannot unpack its indices. However, the items in the result _are_ all strings. """ - crossproduct = [""] - for a in range(word_length): - n = [] - for c in crossproduct: - for m in self: - n.append(m + c) - crossproduct = n - return Alphabet(crossproduct, moltype=self.moltype) + states = (list(self),) * word_length + cross_product = ["".join(combo) for combo in product(*states)] + return Alphabet(cross_product, moltype=self.moltype) def from_seq_to_array(self, sequence): """Returns an array of indices corresponding to items in sequence.
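A small illustration of why the `itertools.product` ordering matters, using a hypothetical 4-letter alphabet: with this state order, a k-mer's position in the word alphabet equals the base-N expansion of its character indices.

```python
from itertools import product

alphabet = "TCAG"  # hypothetical ordering; the real one comes from the moltype
k = 2

words = ["".join(combo) for combo in product(*([alphabet] * k))]

# "CG": C has index 1, G has index 3, so its 1D index should be 1 * 4 + 3 == 7.
assert words[1 * 4 + 3] == "CG"
print(words[:8])  # ['TT', 'TC', 'TA', 'TG', 'CT', 'CC', 'CA', 'CG']
```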
Change text for loop device The new text will be more helpful for most users.
@@ -116,7 +116,7 @@ class UdiskieMenu(object): """ _quit_label = _('Quit') - _losetup_label = _('Setup loop device') + _losetup_label = _('Mount disc image') def __init__(self, daemon, icons, actions, flat=True): """
Version 0.8.4 New release
@@ -19,7 +19,7 @@ with open(path.join(here, 'README.md'), encoding='utf-8') as f: setup( name='pyspedas', - version='0.8.3', + version='0.8.4', description='Python Space Physics Environment Data Analysis \ Software (SPEDAS)', long_description=long_description,
Minor update to README.md Fixed the Travis status image that was sometimes not found, added a header, and updated existing headers to subheaders.
-[![Build Status](https://travis-ci.org/ReFirmLabs/binwalk.png)](https://travis-ci.org/ReFirmLabs/binwalk) +# Binwalk -Description -=========== +[![Build Status](https://travis-ci.org/ReFirmLabs/binwalk.svg?branch=master)](https://travis-ci.org/ReFirmLabs/binwalk) Binwalk is a fast, easy to use tool for analyzing, reverse engineering, and extracting firmware images. -Installation -============ - +## Installation Binwalk follows the standard Python installation procedure: ```bash @@ -23,8 +20,7 @@ $ sudo apt-get install python-lzma For instructions on installing other optional dependencies, see [INSTALL.md](https://github.com/ReFirmLabs/binwalk/blob/master/INSTALL.md). -Usage -===== +## Usage Basic usage is simple:
Update ek_router.txt Sorted and uniqued list from ```pastebin.com```
apps8kjdjglkdfg.herokuapp.com dns0101.herokuapp.com + +# Reference: https://twitter.com/david_jursa/status/1131487385034870784 +# Reference: https://pastebin.com/s98awS0E + +adaranth.com +ailtumty.net +baipagid.com +bestadbid.com +blatwalm.com +bodelen.com +consoupow.com +constintptr.com +dawmal.com +deloplen.com +deloton.com +dexessee.com +dolohen.com +dspmulti.com +ellcurvth.com +gestyy.com +givirsou.net +haupsoti.net +kerumal.com +kirgeemo.com +naustoch.net +otefauks.link +oupushee.com +paibopse.com +pegloang.com +quintag.com +rotumal.com +sifuglie.com +soagitet.net +stremanp.com +ucheephu.com +vexacion.com +voastauz.net +whiceega.com +wussucko.com +viid.me +yealnk.com + +# Generic trails + +/afu.php?zoneid= +/1/1407888/?var= +/api/reverse?var=
Put the comments for the commands above the command itself. Easier to read.
@@ -32,12 +32,12 @@ Deletes old revisions. It can be run regularly to keep revision history manageab .. code:: bash ./manage.py deleterevisions - ./manage.py deleterevisions your_app.YourModel --days=30 # keep any changes from last 30 days - ./manage.py deleterevisions your_app.YourModel --keep=30 + ./manage.py deleterevisions your_app.YourModel --days=30 # keep 30 most recent changes for each item. - ./manage.py deleterevisions your_app.YourModel --keep=3 --days=30 + ./manage.py deleterevisions your_app.YourModel --keep=30 # Keep anything from last 30 days and at least 3 from older changes. + ./manage.py deleterevisions your_app.YourModel --keep=3 --days=30 Run ``./manage.py deleterevisions --help`` for more information.
Fixing comments in facets_normal() Removing one seemingly extraneous comment about the sum of face areas. Updating one comment to refer to the largest face, rather than the first face.
@@ -1627,9 +1627,8 @@ class Trimesh(Geometry): return np.array([]) area_faces = self.area_faces - # sum the area of each group of faces represented by facets - # the face index of the first face in each facet + # the face index of the largest face in each facet index = np.array([i[area_faces[i].argmax()] for i in self.facets]) # (n,3) float, unit normal vectors of facet plane
Remove unused get_critical_path_timings() API ### Problem As mentioned here, this method is not used internally or by known consumers of RunTracker (mainly the toolchain plugin at this point) ### Solution Remove unused method ### Result Reduces the surface area for the RunTracker API. A minimal surface area for the public API is desirable.
@@ -14,7 +14,7 @@ from typing import Dict, List, Optional, Tuple from pants.base.exiter import PANTS_FAILED_EXIT_CODE, PANTS_SUCCEEDED_EXIT_CODE, ExitCode from pants.base.run_info import RunInfo -from pants.base.workunit import WorkUnit, WorkUnitLabel +from pants.base.workunit import WorkUnit from pants.engine.internals.native import Native from pants.goal.aggregated_timings import AggregatedTimings from pants.option.config import Config @@ -286,35 +286,6 @@ class RunTracker(Subsystem): self.self_timings.add_timing(path, self_time, is_tool) self.outcomes[path] = workunit.outcome_string(workunit.outcome()) - def get_critical_path_timings(self): - """Get the cumulative timings of each goal and all of the goals it (transitively) depended - on.""" - setup_workunit = WorkUnitLabel.SETUP.lower() - transitive_dependencies = dict() - raw_timings = dict() - for entry in self.cumulative_timings.get_all(): - raw_timings[entry["label"]] = entry["timing"] - - critical_path_timings = AggregatedTimings() - - def add_to_timings(goal, dep): - tracking_label = get_label(goal) - timing_label = get_label(dep) - critical_path_timings.add_timing(tracking_label, raw_timings.get(timing_label, 0.0)) - - def get_label(dep): - return f"{RunTracker.DEFAULT_ROOT_NAME}:{dep}" - - # Add setup workunit to critical_path_timings manually, as its unaccounted for, otherwise. - add_to_timings(setup_workunit, setup_workunit) - - for goal, deps in transitive_dependencies.items(): - add_to_timings(goal, goal) - for dep in deps: - add_to_timings(goal, dep) - - return critical_path_timings - def get_options_to_record(self) -> dict: recorded_options = {} scopes = self.options.stats_option_scopes_to_record
kindergarten-garden: update tests to version 1.0.0 Renamed existing tests to match test case descriptions. Deleted and added test cases to bring test list into agreement with data. Added missing test version comment. Closes
@@ -2,14 +2,26 @@ import unittest from kindergarten_garden import Garden +# Tests adapted from `problem-specifications//canonical-data.json` @ v1.0.0 + class KindergartenGardenTests(unittest.TestCase): - def test_alices_garden(self): + def test_garden_with_single_student(self): self.assertEqual( Garden("RC\nGG").plants("Alice"), "Radishes Clover Grass Grass".split()) - def test_bob_and_charlies_gardens(self): + def test_different_garden_with_single_student(self): + self.assertEqual( + Garden("VC\nRC").plants("Alice"), + "Violets Clover Radishes Clover".split()) + + def test_garden_with_two_students(self): + garden = Garden("VVCG\nVVRC") + self.assertEqual( + garden.plants("Bob"), "Clover Grass Radishes Clover".split()) + + def test_multiple_students_for_the_same_garden_with_three_students(self): garden = Garden("VVCCGG\nVVCCGG") self.assertEqual(garden.plants("Bob"), ["Clover"] * 4) self.assertEqual(garden.plants("Charlie"), ["Grass"] * 4) @@ -26,6 +38,7 @@ class KindergartenGardenTests(unittest.TestCase): self.assertEqual( garden.plants("Larry"), "Grass Violets Clover Violets".split()) + # Additional tests for this track def test_disordered_test(self): garden = Garden( "VCRRGVRG\nRVGCCGCV",
docs: warning for remove subtree [skip ci]
@@ -238,7 +238,10 @@ def validate_loop(doctype, name, lft, rgt): def remove_subtree(doctype: str, name: str, throw=True): - """Remove doc and all its children.""" + """Remove doc and all its children. + + WARN: This does not run any controller hooks for deletion and deletes them with raw SQL query. + """ frappe.has_permission(doctype, ptype="delete", throw=throw) # Determine the `lft` and `rgt` of the subtree to be removed.
[sync] update `last_reindex` timestamp see and
@@ -2619,6 +2619,7 @@ class SyncEngine: if last_cursor == "": # We are starting from the beginning, do a full indexing. logger.info("Fetching remote Dropbox") + self._state.set("sync", "last_reindex", time.time()) changes_iter = self.client.list_folder_iterator("/", recursive=True) else: # Pick up where we left off. This may be an interrupted indexing /
Fix BigBrother embeds Move text from footer to description.
@@ -298,8 +298,7 @@ class WatchChannel(metaclass=CogABCMeta): message_jump = f"in [#{msg.channel.name}]({msg.jump_url})" footer = f"Added {time_delta} by {actor} | Reason: {reason}" - embed = Embed(description=f"{msg.author.mention} {message_jump}") - embed.set_footer(text=textwrap.shorten(footer, width=256, placeholder="...")) + embed = Embed(description=f"{msg.author.mention} {message_jump}\n\n{footer}") await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.display_avatar.url)
discoveryplus: identify free and premium content better fixes:
# ex:ts=4:sw=4:sts=4:et # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +import datetime import hashlib import logging import random @@ -18,6 +19,7 @@ REALMS = {"discoveryplus.se": "dplayse", "discoveryplus.no": "dplayno", "discove class Dplay(Service): supported_domains = ["discoveryplus.se", "discoveryplus.no", "discoveryplus.dk"] + packages = [] def get(self): parse = urlparse(self.url) @@ -134,7 +136,7 @@ class Dplay(Service): if not self._token(): logging.error("Something went wrong getting token for requests") - premium = self._checkpremium() + self._getpackages() urllocal = "" if self.domain in ["dplay.dk", "dplay.no"]: @@ -175,7 +177,7 @@ class Dplay(Service): if i["type"] != "video": continue if i["attributes"]["videoType"] == "EPISODE": - if not premium and "Free" not in i["attributes"]["packages"]: + if not self._playablefile(i["attributes"]["availabilityWindows"]): continue episodes.append("https://www.{}/videos/{}".format(self.domain, i["attributes"]["path"])) page += 1 @@ -202,9 +204,24 @@ class Dplay(Service): return False return True - def _checkpremium(self) -> bool: + def _getpackages(self): res = self.http.get("https://disco-api.{}/users/me".format(self.domain), headers={"authority": "disco-api.{}".format(self.domain)}) if res.status_code < 400: - if "premium" in res.json()["data"]["attributes"]["products"]: - return True - return False + self.packages.extend(res.json()["data"]["attributes"]["packages"]) + + def _playablefile(self, needs): + playable = False + now = datetime.datetime.utcnow() + for package in self.packages: + for need in needs: + if package != need["package"]: + continue + start = datetime.datetime.strptime(need["playableStart"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=None) + if now > start: + if "playableEnd" in need: + end = datetime.datetime.strptime(need["playableEnd"], "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=None) + if now < end: + playable = True + else: + playable = True + return playable
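Condensed into a standalone function, the availability check this patch introduces looks roughly like the sketch below — assuming the same ISO-8601 `Z` timestamps and `package`/`playableStart`/`playableEnd` keys shown in the diff:

```python
import datetime

def is_playable(windows, owned_packages, now=None):
    """Return True if any availability window matches an owned package and is open now."""
    now = now or datetime.datetime.utcnow()
    for need in windows:
        if need["package"] not in owned_packages:
            continue
        start = datetime.datetime.strptime(need["playableStart"], "%Y-%m-%dT%H:%M:%SZ")
        if now < start:
            continue
        end = need.get("playableEnd")
        if end and now >= datetime.datetime.strptime(end, "%Y-%m-%dT%H:%M:%SZ"):
            continue
        return True
    return False

windows = [{"package": "Free", "playableStart": "2021-01-01T00:00:00Z"}]
print(is_playable(windows, ["Free", "Premium"]))  # True
```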
Made the error message translatable Thanks for highlighting this.
@@ -177,7 +177,7 @@ frappe.request.call = function(opts) { opts.error_callback && opts.error_callback(); }, 502: function(xhr) { - frappe.msgprint("Internal Server Error"); + frappe.msgprint(__("Internal Server Error")); } };
Update apt_hangover.txt Update for Reference and Aliases fields.
# Copyright (c) 2014-2020 Maltrail developers (https://github.com/stamparm/maltrail/) # See the file 'LICENSE' for copying permission -# Aliases: monsoon, neon, viceroy tiger +# Aliases: backconfig, monsoon, neon, viceroy tiger # Reference: https://unit42.paloaltonetworks.com/updated-backconfig-malware-targeting-government-and-military-organizations/ # Reference: https://twitter.com/blackorbird/status/1260217348792844289 # Reference: https://twitter.com/K_N1kolenko/status/1187339471647313921 # Reference: https://twitter.com/ccxsaber/status/1187573497851068417 # Reference: https://www.virustotal.com/gui/file/d87b875b8641c538f90fe68cad4e9bdc89237dba137e934f80996e8731059861/detection +# Reference: https://otx.alienvault.com/pulse/5ebac662ee27db27e3174795 http://185.203.119.184 http://212.114.52.148
Update sentinel-2-l2a-cogs.yaml Tutorial showing how to manage COGs on AWS using Lambda service. It provides software too:
@@ -61,6 +61,9 @@ DataAtWork: - Title: STAC, COG, Python and QGIS URL: http://www.acgeospatial.co.uk/stac-cog-python-and-qgis/ AuthorName: Andrew Cutts + - Title: How to process Sentinel-2 data in a serverless Lambda on AWS? + URL: https://www.linkedin.com/pulse/how-sunny-my-city-better-process-stac-sentinel2-lambda-alvaro-huarte + AuthorName: Alvaro Huarte Publications: - Title: STAC and Sentinel-2 COGs (ESIP Summer Meeting 2020) URL: https://docs.google.com/presentation/d/14NsKFZ3UF2Swwx_9L7sPMX9ccFUK1ruQyZXWK9Cz4L4/edit?usp=sharing
small fix to correctly pad block shape in pytorch block pruner Reviewers: mark.kurtz Subscribers: #core
@@ -363,7 +363,7 @@ class BlockSparsityMaskCreator(GroupedSparsityMaskCreator): """ block_shape = self._block_shape n_dims = len(tens_shape) - if len(block_shape) < n_dims: # Conv will have block shape [X, Y, 1, ..., 1] + while len(block_shape) < n_dims: # Conv will have block shape [X, Y, 1, ..., 1] block_shape.append(1) for idx, shape in enumerate(block_shape): if shape == -1:
Update luminositylinkrat.txt Cleaned up some missed orphan records.
@@ -1320,7 +1320,6 @@ dode32.is-slick.com dogcattree.fishdns.com doly.porche.ml doma.kostirez1.tk -domain.com donclef.duckdns.org donmackay.ddns.net donsage.ddns.net @@ -1344,7 +1343,6 @@ drust4nn.no-ip.org dsfds38.fishdns.com dsiminski.no-ip.org dubbel.crabdance.com -duckdns duckgolden.duckdns.org ducksanddicks.duckdns.org duckyforyou22.duckdns.org @@ -1450,7 +1448,7 @@ fesnon.ddns.net fetzhf.duckdns.org fffffff.duckdns.org fgsaz1.mooo.com -fgsazUKMAIDEN.mooo.com +fgsazukmaiden.mooo.com fhcowned.duckdns.org fhcowned.tk file.it-share.ro @@ -1460,7 +1458,6 @@ finessefather.fishdns.com fingers.noip.me finishiscoolkappa.duckdns.org firebrandhost.mysecuritycamera.com -firehong.Duckdns.org firehong.duckdns.org firewallbd3.tinydns.tech firewallbe.appleupdate.xyz
fix: allow truncated image files Reason: "IOError: broken data stream when reading image file" Reference:
@@ -28,7 +28,7 @@ from frappe import conf from frappe.utils.nestedset import NestedSet from frappe.model.document import Document from frappe.utils import strip -from PIL import Image, ImageOps +from PIL import Image, ImageFile, ImageOps from six import StringIO, string_types from six.moves.urllib.parse import unquote, quote from six import text_type, PY2 @@ -38,9 +38,11 @@ class MaxFileSizeReachedError(frappe.ValidationError): pass -class FolderNotEmpty(frappe.ValidationError): pass +class FolderNotEmpty(frappe.ValidationError): + pass exclude_from_linked_with = True +ImageFile.LOAD_TRUNCATED_IMAGES = True class File(Document):
langkit.dsl: refactor base DSLType info collection TN:
@@ -38,6 +38,16 @@ class DSLType(object): ctx_message = 'in {}'.format(cls._name.camel) return Context(ctx_message, cls._location) + @staticmethod + def _import_base_type_info(name, location, dct): + """ + Metaclass helper. Register in `dct` the name, location and doc + information for the class to be built. + """ + dct['_name'] = names.Name.from_camel(name) + dct['_location'] = location + dct['_doc'] = dct.get('__doc__') + _type = None """ Link to the StructType/ASTNodeType subclass corresponding to this subclass. @@ -201,10 +211,7 @@ class _StructMetaclass(type): ) fields = Struct.collect_fields(name, location, dct, _UserField) - - dct['_name'] = names.Name.from_camel(name) - dct['_location'] = location - dct['_doc'] = dct.get('__doc__') + DSLType._import_base_type_info(name, location, dct) dct['_fields'] = fields @@ -384,9 +391,7 @@ class _ASTNodeMetaclass(type): ' (here: {})'.format(', '.join(sorted(syntax_fields))) ) - dct['_name'] = names.Name.from_camel(name) - dct['_location'] = location - dct['_doc'] = dct.get('__doc__') + DSLType._import_base_type_info(name, location, dct) dct['_fields'] = fields dct['_repr_name'] = repr_name dct['_base'] = base @@ -584,20 +589,15 @@ class _EnumNodeMetaclass(type): 'The "alternatives" field must contain a list of strings' ) - doc = dct.get('__doc__') alts = [EnumNode.Alternative(names.Name.from_lower(alt)) for alt in alternatives] fields = EnumNode.collect_fields(name, location, dct, (_UserField, PropertyDef)) - dct = { - '_name': names.Name.from_camel(name), - '_location': location, - '_doc': doc, - '_fields': fields, - '_alternatives': alts, - '_qualifier': qualifier, - } + DSLType._import_base_type_info(name, location, dct) + dct['_fields'] = fields + dct['_alternatives'] = alts + dct['_qualifier'] = qualifier # Make Alternative instances available as EnumNode class attributes for # a convenient way to create parsers for them. @@ -737,8 +737,7 @@ class _EnumMetaclass(type): _base_cls = None def __new__(mcs, name, bases, dct): - dct['_name'] = names.Name.from_camel(name) - dct['_location'] = extract_library_location() + DSLType._import_base_type_info(name, extract_library_location(), dct) cls = type.__new__(mcs, name, bases, dct)
Mount libpod container volume into collectd container The collectd-libpod-stats plugin requires additional libpod volumes to be mounted into the collectd container in order to find and track running containers. This mounts the only additional volume necessary.
@@ -683,6 +683,7 @@ outputs: - {get_attr: [ContainersCommon, volumes]} - - /var/lib/kolla/config_files/collectd.json:/var/lib/kolla/config_files/config.json:ro + - /var/lib/containers/storage/overlay-containers:/var/lib/containers/storage/overlay-containers:ro - /var/lib/config-data/puppet-generated/collectd:/var/lib/kolla/config_files/src:ro - /var/log/containers/collectd:/var/log/collectd:rw,z - /run/:/run:rw
Cleanup __init__ Handle errors in retrieving origin loggers. Simplify some branching logic.
@@ -6,7 +6,7 @@ import traceback from datetime import datetime from bson import json_util -from flask import Flask, request, session +from flask import Flask, session from flask_mail import Mail from werkzeug.contrib.fixers import ProxyFix @@ -21,15 +21,27 @@ import api.routes.stats import api.routes.team import api.routes.user from api.annotations import jsonify -from api.common import (InternalException, SevereInternalException, WebError, - WebException, WebSuccess) +from api.common import ( + InternalException, + SevereInternalException, + WebError, + WebException, + WebSuccess +) + +log = logging.getLogger(__name__) def get_origin_logger(exception): """Get the logger for the module where an exception was raised.""" + try: origin = inspect.getmodule(inspect.trace()[-1]).__name__ origin_logger = logging.getLogger(origin) return origin_logger + except Exception as e: + log.error('Failed to get origin logger for exception: ' + str(e) + + ' - returning fallback logger') + return logging.getLogger('origin_fallback') def update_mail_config(app): @@ -117,20 +129,16 @@ def create_app(test_config=None): response.headers.add('Cache-Control', 'no-store') if api.auth.is_logged_in(): # Flask 1.0+ bug loads config SESSION_COOKIE_DOMAIN - # correctly as None but later converts it to bool false. (@todo) + # correctly as None but later converts it to bool false. domain = app.config['SESSION_COOKIE_DOMAIN'] if not domain: domain = None - if 'token' in session: - response.set_cookie('token', session['token'], domain=domain) - else: + if 'token' not in session: csrf_token = api.common.token() session['token'] = csrf_token - response.set_cookie('token', csrf_token, domain=domain) + response.set_cookie('token', session['token'], domain=domain) - # JB: This is a hack. We need a better solution (@todo) - if request.path[0:19] != "/api/autogen/serve/": response.mimetype = 'application/json' return response
(run-hijacking-2) Extract do_launch_for_created_run from _launch_pipeline_execution_for_created_run Summary: We are going to use this function to hijack start_pipeline_execution_for_created_run codepath. Had to change the code to throw a UserFacingGraphQLError rather than just return it. Depends on D3092 Test Plan: BK Reviewers: alangenfeld, max
@@ -110,7 +110,7 @@ def _launch_pipeline_execution(graphene_info, execution_params, is_reexecuted=Fa ) -def _launch_pipeline_execution_for_created_run(graphene_info, run_id): +def do_launch_for_created_run(graphene_info, run_id): check.inst_param(graphene_info, 'graphene_info', ResolveInfo) check.str_param(run_id, 'run_id') @@ -118,7 +118,9 @@ def _launch_pipeline_execution_for_created_run(graphene_info, run_id): instance = graphene_info.context.instance pipeline_run = instance.get_run_by_id(run_id) if not pipeline_run: - return graphene_info.schema.type_named('PipelineRunNotFoundError')(run_id) + raise UserFacingGraphQLError( + graphene_info.schema.type_named('PipelineRunNotFoundError')(run_id) + ) external_pipeline = get_external_pipeline_or_raise( graphene_info, pipeline_run.pipeline_name, pipeline_run.solid_subset @@ -158,19 +160,31 @@ def _launch_pipeline_execution_for_created_run(graphene_info, run_id): instance.report_run_failed(pipeline_run) - return DauphinPipelineConfigValidationInvalid.for_validation_errors( + raise UserFacingGraphQLError( + DauphinPipelineConfigValidationInvalid.for_validation_errors( external_pipeline, validated_config.errors ) + ) try: - pipeline_run = instance.launch_run(pipeline_run.run_id) + return instance.launch_run(pipeline_run.run_id) except DagsterLaunchFailedError: error = serializable_error_info_from_exc_info(sys.exc_info()) instance.report_engine_event( error.message, pipeline_run, EngineEventData.engine_error(error), ) instance.report_run_failed(pipeline_run) + # https://github.com/dagster-io/dagster/issues/2508 + # We should return a proper GraphQL error here + raise + + +def _launch_pipeline_execution_for_created_run(graphene_info, run_id): + check.inst_param(graphene_info, 'graphene_info', ResolveInfo) + check.str_param(run_id, 'run_id') + + run = do_launch_for_created_run(graphene_info, run_id) return graphene_info.schema.type_named('LaunchPipelineRunSuccess')( - run=graphene_info.schema.type_named('PipelineRun')(pipeline_run) + run=graphene_info.schema.type_named('PipelineRun')(run) )
Make EDL shaders disabled by default with an option to enable them These were copied into the source repo a long time ago, but now these features are available in VTK. I'm keeping them here for now but probably should just remove them.
@@ -23,6 +23,9 @@ set(EXTRA_SRCS ##### +option(USE_EDL_SHADERS OFF) +if(USE_EDL_SHADERS) + set(GLSL_RESOURCES_DIR edl_resources/Shaders ) @@ -54,6 +57,8 @@ foreach(file ${GLSL_SRCS}) set(EXTRA_SRCS ${EXTRA_SRCS} ${res}) endforeach(file) +endif() + #####
fix: don't update apps/{app}/modules.txt if called from uninstall operation
@@ -42,6 +42,10 @@ class ModuleDef(Document): def on_trash(self): """Delete module name from modules.txt""" + + if frappe.flags.in_uninstall: + return + modules = None if frappe.local.module_app.get(frappe.scrub(self.name)): with open(frappe.get_app_path(self.app_name, "modules.txt"), "r") as f:
modify get_version() tests in test_anim.py: remove unused monkeypatch fixtures, convert string paths to Path(), make test function names more explicit, use standard assert for string comparison
#!/usr/bin/env python # -*- coding: utf-8 -*- # (c) The James Hutton Institute 2017-2019 -# (c) University of Strathclyde 2019-2020 +# (c) University of Strathclyde 2019-2021 # Author: Leighton Pritchard # # Contact: # The MIT License # # Copyright (c) 2017-2019 The James Hutton Institute -# Copyright (c) 2019-2020 University of Strathclyde +# Copyright (c) 2019-2021 University of Strathclyde # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -44,18 +44,15 @@ These tests are intended to be run from the repository root using: pytest -v """ -import os - from pathlib import Path from typing import List, NamedTuple, Tuple import pandas as pd import pytest -import unittest from pandas.util.testing import assert_frame_equal -from pyani import anim, pyani_files, pyani_tools +from pyani import anim, pyani_files class DeltaDir(NamedTuple): @@ -148,37 +145,31 @@ def mummer_cmds_four(path_file_four): ) -# Create object for accessing unittest assertions -assertions = unittest.TestCase("__init__") - - # Test get_version() # Test case 1: there is no executable -def test_get_version_1(executable_missing, monkeypatch): +def test_get_version_no_exe(executable_missing): """Test behaviour when there is no file at the specified executable location.""" - test_file_1 = "/non/existent/file" - assertions.assertEqual( - anim.get_version(test_file_1), f"No nucmer executable at {test_file_1}" - ) + test_file_1 = Path("/non/existent/nucmer") + assert anim.get_version(test_file_1) == f"No nucmer executable at {test_file_1}" # Test case 2: there is a file, but it is not executable -def test_get_version_2(executable_not_executable, monkeypatch): +def test_get_version_exe_not_executable(executable_not_executable): """Test behaviour when the file at the executable location is not executable.""" - test_file_2 = "/non/executable/file" - assertions.assertEqual( - anim.get_version(test_file_2), - f"nucmer exists at {test_file_2} but not executable", + test_file_2 = Path("/non/executable/nucmer") + assert ( + anim.get_version(test_file_2) + == f"nucmer exists at {test_file_2} but not executable" ) # Test case 3: there is an executable file, but the version can't be retrieved -def test_get_version_3(executable_without_version, monkeypatch): +def test_get_version_exe_no_version(executable_without_version): """Test behaviour when the version for the executable can not be retrieved.""" - test_file_3 = "/missing/version/file" - assertions.assertEqual( - anim.get_version(test_file_3), - f"nucmer exists at {test_file_3} but could not retrieve version", + test_file_3 = Path("/missing/version/nucmer") + assert ( + anim.get_version(test_file_3) + == f"nucmer exists at {test_file_3} but could not retrieve version" )
Wrap scheduler definition uploader with credential provider Required to set the right credentials when uploading a custom scheduler definition
@@ -1134,6 +1134,7 @@ def upload_scheduler_plugin_definitions(s3_bucket_factory_shared, request) -> di ) scheduler_definition_dict[plugin_name] = {} for region, s3_bucket in s3_bucket_factory_shared.items(): + with aws_credential_provider(region, request.config.getoption("credential")): scheduler_plugin_definition_url = scheduler_plugin_definition_uploader( scheduler_definition, s3_bucket, plugin_name, region )
Update EnvSpec.check_properties' docstring TN:
@@ -203,10 +203,10 @@ class EnvSpec(object): def check_properties(self): """ - Method call by CompileCtx.compute_properties. Used to check that - properties generated by the env spec are conforming. This relies on - type information and property attributes (privacy, implicit envs), so - it must run only after these can be computed. + Method to implement an ASTNode pass, which checks that properties + generated by the env spec are conforming. This relies on type + information and property attributes (privacy, implicit envs), so it + must run only after these can be computed. :rtype: bool """
updated conda installation instructions Removes pip install instructions for `pmdarima` and `tbats` as they are now included in conda install by default
@@ -55,8 +55,8 @@ To install ``sktime`` with maximum dependencies, including soft dependencies, in conda install -c conda-forge sktime-all-extras -Note: currently this does not include dependencies ``catch-22``, ``pmdarima``, and ``tbats``. -As these packages are not available on ``conda-forge``, they must be installed via ``pip`` if desired. +Note: currently this does not include the dependency ``catch-22``. +As this package is not available on ``conda-forge``, it must be installed via ``pip`` if desired. Contributions to remedy this situation are appreciated.
Update index.html with barrosdaniel profile link.
<a href="https://github.com/ArquiteturaJoel">GitHub/ArquiteturaJoel</a></li> <li>Hi, I am Karthick Thoppe and my profile link is <a href="https://github.com/karthicktv">GitHub/karthicktv</a></li> + <li>Hi, I am Daniel Barros and my profile link is + <a href="https://github.com/barrosdaniel">GitHub/barrosdaniel</a></li> </b> </ol> </div>
extract step key data into column for postgres event log table Summary: same as D2539, but postgres overrides the default `SqlEventLogStorage::store_event` method Test Plan: bk Reviewers: sashank
@@ -97,8 +97,11 @@ def store_event(self, event): check.inst_param(event, 'event', EventRecord) dagster_event_type = None + step_key = event.step_key + if event.is_dagster_event: dagster_event_type = event.dagster_event.event_type_value + step_key = event.dagster_event.step_key run_id = event.run_id @@ -109,6 +112,7 @@ def store_event(self, event): event=serialize_dagster_namedtuple(event), dagster_event_type=dagster_event_type, timestamp=datetime.datetime.fromtimestamp(event.timestamp), + step_key=step_key, ) result_proxy = conn.execute( event_insert.returning(
avoid Py 3.8 for BanditIntegrationTest.test_3rdparty_plugin ### Problem As described in the linked issue, `BanditIntegrationTest.test_3rdparty_plugin` is flaky because its success depends on the specific Python version that the test runs the `bandit-aws` plugin with. ### Solution Do not allow Python 3.8 to be used to run the test. ### Result Test passes.
@@ -174,7 +174,10 @@ class BanditIntegrationTest(ExternalToolTestBase): def test_3rdparty_plugin(self) -> None: target = self.make_target_with_origin( - [FileContent("bad.py", b"aws_key = 'JalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY'\n")] + [FileContent("bad.py", b"aws_key = 'JalrXUtnFEMI/K7MDENG/bPxRfiCYzEXAMPLEKEY'\n")], + # NB: `bandit-aws` does not currently work with Python 3.8. See + # https://github.com/pantsbuild/pants/issues/10545. + interpreter_constraints="CPython>=3.6,<3.8", ) result = self.run_bandit( [target], additional_args=["--bandit-extra-requirements=bandit-aws"]
Update integration.py Stated which version is required in the error message
@@ -20,6 +20,7 @@ def _warn_on_old_setuptools(_version=setuptools.__version__): ERROR: setuptools=={_version} is used in combination with setuptools_scm>=6.x Your build configuration is incomplete and previously worked by accident! +setuptools_scm requires setuptools>=45 This happens as setuptools is unable to replace itself when a activated build dependency
Add fields attribute to photo.Photo and photo.Photoalbum; docstring update
@@ -16,6 +16,7 @@ class Photoalbum(PlexPartialObject): addedAt (datetime): Datetime this item was added to the library. art (str): Photo art (/library/metadata/<ratingkey>/art/<artid>) composite (str): Unknown + fields (list): List of :class:`~plexapi.media.Field`. guid (str): Unknown (unique ID) index (sting): Index number of this album. key (str): API URL (/library/metadata/<ratingkey>). @@ -37,6 +38,7 @@ class Photoalbum(PlexPartialObject): self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) self.art = data.attrib.get('art') self.composite = data.attrib.get('composite') + self.fields = self.findItems(data, etag='Field') self.guid = data.attrib.get('guid') self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key') @@ -81,6 +83,7 @@ class Photo(PlexPartialObject): TAG (str): 'Photo' TYPE (str): 'photo' addedAt (datetime): Datetime this item was added to the library. + fields (list): List of :class:`~plexapi.media.Field`. index (sting): Index number of this photo. key (str): API URL (/library/metadata/<ratingkey>). listType (str): Hardcoded as 'photo' (useful for search filters). @@ -104,6 +107,7 @@ class Photo(PlexPartialObject): """ Load attribute values from Plex XML response. """ self.listType = 'photo' self.addedAt = utils.toDatetime(data.attrib.get('addedAt')) + self.fields = self.findItems(data, etag='Field') self.index = utils.cast(int, data.attrib.get('index')) self.key = data.attrib.get('key') self.originallyAvailableAt = utils.toDatetime(
Update json_schema/core/file/file_core.json Changed to `DCP/2 Ingest` for consistency with other values
"LungMAP", "Zenodo", "Publication", - "HCA Ingest" + "DCP/2 Ingest" ], "user_friendly": "File source", "guidelines": "Should be one of: DCP/2 Analysis, Contributor, ArrayExpress, HCA Release, GEO, SCEA, SCP, DCP/1 Matrix Service, LungMAP, Zenodo, Publication",
Tool: remove abstract decorator for virtual tool Virtual tools may not need to implement the command method, but the abstract class requires the abstract method. Removing the abstract base class makes it optional.
from __future__ import annotations import pathlib -from abc import ABC, abstractmethod from hashlib import sha256 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, TypeVar, Union, cast @@ -20,7 +19,7 @@ if TYPE_CHECKING: T = TypeVar("T") -class Tool(ABC, InitializableMixin): +class Tool(InitializableMixin): """ The base class, which wraps an executable, package, or scripts on a node. A tool can be installed, and execute on a node. When a tool is needed, call @@ -67,7 +66,6 @@ class Tool(ABC, InitializableMixin): self.__cached_results: Dict[str, Process] = {} @property - @abstractmethod def command(self) -> str: """ Return command string, which can be run in console. For example, echo.
HelpChannels: move message None check inside `match_bot_embed` It was being done repeatedly outside the function so let's move it in to reduce redundancy.
@@ -440,14 +440,11 @@ class HelpChannels(Scheduler, commands.Cog): def is_dormant_message(self, message: t.Optional[discord.Message]) -> bool: """Return True if the contents of the `message` match `DORMANT_MSG`.""" - if not message: - return False - return self.match_bot_embed(message, DORMANT_MSG) - def match_bot_embed(self, message: discord.Message, description: str) -> bool: + def match_bot_embed(self, message: t.Optional[discord.Message], description: str) -> bool: """Return `True` if the bot's `message`'s embed description matches `description`.""" - if not message.embeds: + if not message or not message.embeds: return False embed = message.embeds[0] @@ -748,9 +745,6 @@ class HelpChannels(Scheduler, commands.Cog): async def is_empty(self, channel: discord.TextChannel) -> bool: """Return True if the most recent message in `channel` is the bot's `AVAILABLE_MSG`.""" msg = await self.get_last_message(channel) - if not msg: - return False - return self.match_bot_embed(msg, AVAILABLE_MSG) async def reset_send_permissions(self) -> None:
Correct links and text Update for new Authn page locations and terminology
@@ -6,9 +6,9 @@ layout: default [Previous Section](decide_deletion_policy.md) \| [Back to Contents](index.md) \| [Next Section](identify_server.md) -Adobe has designed a secure protocol for applications to integrate with Adobe Apis and User Sync is such an application. +Adobe has designed a secure protocol for applications to integrate with Adobe APIs and User Sync is such an application. -Setup steps are documented. For complete information about the integration setup process and certificate requirements, see [here](https://www.adobe.io/products/usermanagement/docs/setup) +Setup steps are documented. For complete information about the integration setup process and certificate requirements, see [here](https://www.adobe.io/apis/cloudplatform/console/authentication.html) - You need to create or obtain a digital certificate to sign initial API calls. - The certificate is not used for SSL or any other purpose so trust chains and browser issues do not apply. @@ -20,7 +20,7 @@ Setup steps are documented. For complete information about the integration setu &#9744; Obtain or create a digital signing certificate. See [instructions for certificate creation](https://www.adobe.io/apis/cloudplatform/console/authentication/createcert.html). -&#9744; Setup an adobe.io integration for each organization you need to access (usually only one). See Step 2 and 3 on this [page](https://www.adobe.io/apis/cloudplatform/console/authentication/gettingstarted.html) +&#9744; Use the [Adobe I/O Console](https://console.adobe.io) to add the User Management service to a new or existing adobe.io integration for each organization you need to access (usually only one). &#9744; Note the configuration parameters for your integration (redacted example shown below). They will be used in a later step.
Address a TODO comment in the legacy action provider test * Added a test action generator to the test that checks the config option "load_action_generators".
@@ -120,6 +120,11 @@ class LegacyActionProviderTest(base.BaseTest): ) self.assertEqual('output, delay=0', action_desc.params_spec) + @mock.patch.object( + legacy.LegacyActionProvider, + '_get_action_generators', + mock.MagicMock(return_value=[TestActionGenerator]) + ) def test_only_action_plugins(self): self.override_config( 'load_action_generators', @@ -129,9 +134,6 @@ class LegacyActionProviderTest(base.BaseTest): provider = legacy.LegacyActionProvider() - # TODO(rakhmerov): Implement loading actions from generators - # and test with a generator. - action_descs = provider.find_all() prefix = 'mistral.actions.std_actions'
Re-ordered test skip condition. Re-ordered the boolean `and` so it should fail fast on Windows and not try to run the 2nd part, which would cause issues.
@@ -288,7 +288,7 @@ def test_output_redirection(base_app): os.remove(filename) [email protected](getpass.getuser() == 'travis' and sys.platform.startswith('linux'), [email protected](sys.platform.startswith('linux') and getpass.getuser() == 'travis', reason="Unit test passes on Ubuntu 16.04 and Debian 8.7, but fails on TravisCI Linux containers") def test_input_redirection(base_app, request): test_dir = os.path.dirname(request.module.__file__)
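The reordering works because Python's `and` short-circuits: once the left operand is falsy, the right operand is never evaluated. A tiny illustration:

```python
import getpass
import sys

# On non-Linux platforms the left operand is already False, so
# getpass.getuser() is never even called.
skip_flaky_test = sys.platform.startswith("linux") and getpass.getuser() == "travis"
print(skip_flaky_test)
```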
ebuild.ebd_ipc: drop userpriv eapply()/unpack() support Using it causes breakage when unpacking inside dirs with root perms, e.g. recent firefox-bin ebuilds that unpack directly to subdirs under ${D}.
@@ -963,9 +963,6 @@ class Eapply(IpcCommand): output_func = self.observer.info spawn_kwargs = {'collect_fds': (1, 2)} - if self.op.userpriv: - spawn_kwargs['uid'] = os_data.portage_uid - spawn_kwargs['gid'] = os_data.portage_gid for path, patches in args: prefix = '' @@ -1070,18 +1067,13 @@ class Unpack(IpcCommand): yield archive, ext, path def run(self, args): - spawn_kwargs = {} - if self.op.userpriv: - spawn_kwargs['uid'] = os_data.portage_uid - spawn_kwargs['gid'] = os_data.portage_gid - for filename, ext, source in args.targets: self.observer.write(f'>>> Unpacking {filename} to {self.cwd}', autoline=True) self.observer.flush() dest = pjoin(self.cwd, filename[:-len(ext)]) try: target = ArComp(source, ext=ext) - target.unpack(dest=dest, **spawn_kwargs) + target.unpack(dest=dest) except ArCompError as e: raise IpcCommandError(str(e), code=e.code)
add '--no-integration-tests' option to pytest. When pytest is called with the '--no-integration-tests' flag, all the tests that require an OEF node will be skipped.
@@ -40,7 +40,7 @@ ROOT_DIR = os.path.join(CUR_PATH, "..") def pytest_addoption(parser): """Add options to the parser.""" parser.addoption("--ci", action="store_true", default=False) - parser.addoption("--no-oef", action="store_true", default=False, help="Skip tests that require the OEF.") + parser.addoption("--no-integration-tests", action="store_true", default=False, help="Skip integration tests.") @pytest.fixture(scope="session") @@ -175,7 +175,7 @@ def _create_oef_docker_image(oef_addr_, oef_port_) -> Container: @pytest.fixture(scope="session") def network_node(oef_addr, oef_port, pytestconfig): """Network node initialization.""" - if pytestconfig.getoption("no_oef"): + if pytestconfig.getoption("no_integration_tests"): pytest.skip('skipped: no OEF running') return
fix(db): Log mogrified queries
Prior to this, queries passed to `frappe.db.sql` with values looked like:
"SELECT `defkey`,`defvalue` FROM `tabDefaultValue` WHERE `parent`=%(param1)s ORDER BY `creation`"
Now they are logged fully built ("mogrified"), with the values substituted:
"SELECT `defkey`,`defvalue` FROM `tabDefaultValue` WHERE `parent`='__global' ORDER BY `creation`"
@@ -190,7 +190,7 @@ class MariaDBDatabase(MariaDBConnectionUtil, MariaDBExceptionUtil, Database): return db_size[0].get("database_size") def log_query(self, query, values, debug, explain): - self.last_query = self._cursor._last_executed + self.last_query = query = self._cursor._last_executed self._log_query(query, debug, explain) return self.last_query
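For context, a minimal sketch (not from the frappe codebase; it assumes a reachable MySQL/MariaDB server with these connection parameters) of what "mogrified" means here. pymysql's public `Cursor.mogrify()` returns the statement with values interpolated, and the private `_last_executed` attribute the patch reads holds the same interpolated string after execution:

```python
# Standalone illustration of parameterized vs. mogrified query text.
import pymysql

conn = pymysql.connect(host="localhost", user="root", password="", database="test")
cur = conn.cursor()

query = "SELECT `defkey`, `defvalue` FROM `tabDefaultValue` WHERE `parent`=%(parent)s"
values = {"parent": "__global"}

print(query)                       # parameterized form, what used to be logged
print(cur.mogrify(query, values))  # values substituted, what gets logged now

cur.execute(query, values)
print(cur._last_executed)          # same interpolated statement, recorded by the cursor
```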
Update Map, Filter, Reduce.md
There was a spelling mistake, so I edited it from "retuns" to "returns".
@@ -12,7 +12,7 @@ The ```map()``` function in python has the following syntax: Where ```func``` is the function on which each element in ```iterables``` (as many as they are) would be applied on. Notice the asterisk(```*```) on ```iterables```? It means there can be as many iterables as possible, in so far ```func``` has that exact number as required input arguments. Before we move on to an example, it's important that you note the following: -1. In Python 2, the ```map()``` function retuns a list. In Python 3, however, the function returns a ```map object``` which is a generator object. To get the result as a list, the built-in ```list()``` function can be called on the map object. i.e. ```list(map(func, *iterables))``` +1. In Python 2, the ```map()``` function returns a list. In Python 3, however, the function returns a ```map object``` which is a generator object. To get the result as a list, the built-in ```list()``` function can be called on the map object. i.e. ```list(map(func, *iterables))``` 2. The number of arguments to ```func``` must be the number of ```iterables``` listed. Let's see how these rules play out with the following examples.
fix: encode URI unconditionally ref:
@@ -334,10 +334,7 @@ frappe.router = { return null; } else { a = String(a); - if (a && a.match(/[%'"\s\t]/)) { - // if special chars, then encode a = encodeURIComponent(a); - } return a; } }).join('/');
pkg_generic_api_body_ada.mako: kill aliasing warnings TN:
@@ -8,10 +8,12 @@ with ${ada_lib_name}.Public_Converters; use ${ada_lib_name}.Public_Converters; package body ${ada_lib_name}.Generic_API is + pragma Warnings (Off, "possible aliasing problem for type"); function "+" is new Ada.Unchecked_Conversion (Internal_Context, Implementation.Internal_Context); function "+" is new Ada.Unchecked_Conversion (Implementation.Internal_Context, Internal_Context); + pragma Warnings (On, "possible aliasing problem for type"); function "+" is new Ada.Unchecked_Conversion (Internal_Unit, Implementation.Internal_Unit);
Updated total episodes Bananya/Food Wars/Beastars/Ani ni Tsukeru.
@@ -26,6 +26,7 @@ streams: title: 'Bananya: Fushigi na Nakama-tachi' alias: ['Bananya and the Curious Bunch'] has_source: false +length: 13 info: mal: 'https://myanimelist.net/anime/40228' anilist: 'https://anilist.co/anime/110881/Bananya-Fushigi-na-Nakamatachi/' @@ -847,6 +848,7 @@ streams: --- title: 'Beastars' has_source: true +length: 12 info: mal: 'https://myanimelist.net/anime/39195/Beastars' anilist: 'https://anilist.co/anime/107660/BEASTARS/' @@ -947,6 +949,7 @@ streams: title: 'Shokugeki no Souma: Shin no Sara' alias: ['Food Wars! The Fourth Plate'] has_source: true +length: 12 info: mal: 'https://myanimelist.net/anime/39940/Shokugeki_no_Souma__Shin_no_Sara' anilist: 'https://anilist.co/anime/109963/Shokugeki-no-Souma-Shin-no-Sara/' @@ -1223,6 +1226,7 @@ streams: title: 'Ani ni Tsukeru Kusuri wa Nai! Season 3' alias: ['Please Take My Brother Away'] has_source: true +length: 12 info: mal: 'https://myanimelist.net/anime/39959/Ani_ni_Tsukeru_Kusuri_wa_Nai_3' anilist: 'https://anilist.co/anime/110088/Ani-ni-Tsukeru-Kusuri-wa-Nai-3/'
storage: handle cn-north-1 region Handle S3 cn-north-1's endpoint, which has a com.cn TLD.
@@ -744,6 +744,9 @@ class S3Storage(_CloudStorage): connect_kwargs["endpoint_url"] = "https://s3.{region}.amazonaws.com".format( region=s3_region ) + # cn-north-1's endpoint has a .com.cn TLD + if s3_region == "cn-north-1": + connect_kwargs["endpoint_url"] = connect_kwargs["endpoint_url"] + ".cn" elif host or endpoint_url: connect_kwargs["endpoint_url"] = endpoint_url or _build_endpoint_url( host, port=port, is_secure=True
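A minimal standalone sketch (an assumed helper, not part of the storage module) of the endpoint selection this patch implements; the resulting URLs are shown in the comments:

```python
# Region-to-endpoint mapping: cn-north-1 lives under the .com.cn TLD.
def s3_endpoint_url(region: str) -> str:
    url = f"https://s3.{region}.amazonaws.com"
    if region == "cn-north-1":
        url += ".cn"
    return url


print(s3_endpoint_url("us-east-1"))   # https://s3.us-east-1.amazonaws.com
print(s3_endpoint_url("cn-north-1"))  # https://s3.cn-north-1.amazonaws.com.cn
```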
added command to source gdbinit.py
You need to source gdbinit.py for it to take effect.
@@ -36,6 +36,7 @@ Installation is straightforward. Pwndbg is best supported on Ubuntu 18.04 with git clone https://github.com/pwndbg/pwndbg cd pwndbg ./setup.sh +echo "source $(pwd)/gdbinit.py" >> ~/.gdbinit ``` Other Linux distributions are also supported via `setup.sh`, including:
Add instructions to run Rally as a Kubernetes Job or a Docker container Closes
@@ -129,6 +129,16 @@ Whenever you want to use Rally, run the activation script (step 2 above) first. .. _install_offline-install: +Kubernetes Job +-------------- + +You can run Rally as a Kubernetes `Job <https://kubernetes.io/docs/concepts/workloads/controllers/jobs-run-to-completion/>`_ via `this <https://github.com/gdmello/elasticsearch-rally>`_ `Helm Chart <https://helm.sh/>`_. + +Docker +------ + +You can run Rally as a docker container too. Follow the instructions `here <https://github.com/gdmello/elasticsearch-rally/tree/master/docker>`_. + Offline Install ---------------
Fix typo in Github links in elementwise_ops_schema.cc Summary: s/elementwise_op_schema.cc/elementwise_ops_schema.cc Pull Request resolved:
@@ -27,7 +27,7 @@ Argument `broadcast=1` needs to be passed to enable broadcasting. Github Links: -- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_op_schema.cc +- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_ops_schema.cc )DOC"; @@ -883,7 +883,7 @@ Performs element-wise negation on input tensor `X`. Github Links: -- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_op_schema.cc +- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_ops_schema.cc <details> @@ -938,7 +938,7 @@ OPERATOR_SCHEMA(Sign) Computes sign for each element of the input: -1, 0 or 1. Github Link: -- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_op_schema.cc +- https://github.com/pytorch/pytorch/blob/master/caffe2/operators/elementwise_ops_schema.cc <details>
Add inplace TileFetcher module back to public builder API This commit addresses an import error encountered when trying to import inplace TileFetcher functionality. Example failing import that will be fixed by this commit: from starfish.experiment.builder.inplace import ( InplaceFetchedTile, enable_inplace_mode, inplace_tile_opener)
-from starfish.core.experiment.builder import build_image, write_experiment_json # noqa: F401 +from starfish.core.experiment.builder import ( # noqa: F401 + build_image, + inplace, + write_experiment_json, +) from starfish.core.experiment.builder.providers import FetchedTile, TileFetcher # noqa: F401
Pin flask dependencies to versions supporting 1.x
Flask should be upgraded to 2.x. For the purposes of switching to python3, keep it at 1.x to minimize the amount of changes happening at the same time.
@@ -41,8 +41,13 @@ GENERIC_REQ = [ ] WEB_REQ = [ + # TODO: upgrade flask and associated dependencies to 2.x "Flask == 1.1.1", "flask-talisman==0.7.0", + "jinja2 <= 2.11.3", # jinja 3.x requires also upgrading flask to 2.x + "markupsafe <= 1.1.1", # markupsafe 2.x requires also upgrading flask to 2.x + "itsdangerous <= 1.1.0", # markupsafe 2.x requires also upgrading flask to 2.x + "werkzeug <= 1.0.1", # markupsafe 2.x requires also upgrading flask to 2.x "blinker == 1.4", ]
Log dead worker replacement
We saw a log suggesting a large number of threads. This should help find out if it's due to dead workers being replaced.
@@ -218,6 +218,7 @@ class DeployDaemon(PaastaThread): live_workers = len([worker for worker in self.workers if worker.is_alive()]) number_of_dead_workers = self.config.get_deployd_number_workers() - live_workers for i in range(number_of_dead_workers): + self.log.error("Detected a dead worker, starting a replacement thread") worker_no = len(self.workers) + 1 worker = PaastaDeployWorker(worker_no, self.inbox_q, self.bounce_q, self.config, self.metrics) worker.start()
Update handle_offline_cpu.sh
1. Check if ethtool is available.
2. Check if ethtool -L succeeded.
@@ -30,6 +30,22 @@ function Main() { basedir=$(pwd) syn_net_adpt="" + ethtool --version + if [ $? != 0 ]; then + install_package ethtool + LogMsg "Installing ethtool in the VM" + ethtool --version + if [ $? != 0 ]; then + LogErr "Did not find ethtool and could not install ethtool. Stop the test here" + SetTestStateFailed + exit 0 + else + LogMsg "Successfully installed ethtool in the VM, and proceed the rest of test steps." + fi + else + LogMsg "Found ethtool in the VM, and proceed the rest of steps." + fi + LogMsg "Change all vmbus channels' cpu id to 0, if non-zero" for _device in /sys/bus/vmbus/devices/* do @@ -107,6 +123,10 @@ function Main() { ((_new_counts=_new_counts+1)) ethtool -L eth0 combined $_new_counts + if [ $? != 0 ]; then + LogErr "Failed to execute channel number change by ethtool, $?" + failed_count=$((failed_count+1)) + else sleep 1 LogMsg "Changed the channel numbers to $_new_counts" @@ -131,6 +151,7 @@ function Main() { fi done < new_channel_vp_mapping fi + fi echo "job_completed=0" >> $basedir/constants.sh LogMsg "Main function job completed" }
test-backend: Improve performance by disabling XML report. We don't use the XML report ourselves. We add options to make this easy to control if specific circumstances indicate doing so.
@@ -232,6 +232,12 @@ def main() -> None: parser.add_argument( "--verbose-coverage", action="store_true", help="Enable verbose print of coverage report." ) + parser.add_argument( + "--xml-report", action="store_true", help="Enable (slow) XML coverage report." + ) + parser.add_argument( + "--no-html-report", action="store_true", help="Disable (slow) HTML coverage report." + ) parser.add_argument( "--no-cov-cleanup", action="store_true", help="Do not clean generated coverage files." ) @@ -441,7 +447,12 @@ def main() -> None: if options.verbose_coverage: print("Printing coverage data") cov.report(show_missing=False) + if options.xml_report: + print("Writing XML report") cov.xml_report(outfile="var/coverage.xml") + print("XML report saved; see var/coverage.xml") + if not options.no_html_report: + print("Writing HTML report") cov.html_report(directory="var/coverage", show_contexts=True) print("HTML report saved; visit at http://127.0.0.1:9991/coverage/index.html") if full_suite and not failures and options.coverage:
Fix ELB discovery.
ELBs will not provide 'aliases', so we do not require them on the creation API. The update API already handles this.
@@ -97,10 +97,9 @@ def create(**kwargs): :param kwargs: :return: """ - alias_names = kwargs.pop("aliases") endpoint = Endpoint(**kwargs) - if alias_names: - endpoint.aliases = [EndpointDnsAlias(alias=name) for name in alias_names] + if "aliases" in kwargs: + endpoint.aliases = [EndpointDnsAlias(alias=name) for name in kwargs.pop("aliases")] database.create(endpoint) metrics.send( "endpoint_added", "counter", 1, metric_tags={"source": endpoint.source.label}
Update github-takeover.yaml
This takeover is no longer possible.
@@ -3,7 +3,7 @@ id: github-takeover info: name: Github Takeover Detection author: pdteam,th3r4id - severity: high + severity: info reference: - https://github.com/EdOverflow/can-i-take-over-xyz tags: takeover,github
bot_code: Add c dir to PASSLIST PASSLIST in bot_main.py defines what may stay in the swarming bot root. The named caches root wasn't in the PASSLIST, so it was deleted on each bot start. Add it there. Review-Url:
@@ -71,6 +71,7 @@ SINGLETON = singleton.Singleton(os.path.dirname(THIS_FILE)) # for more details. PASSLIST = ( '*-cacert.pem', + 'c', 'cipd_cache', 'isolated_cache', 'logs',
$.Analysis: make Unit_Provider_Access type a general one ("access all") TN:
@@ -106,7 +106,7 @@ package ${ada_lib_name}.Analysis is type Unit_Provider_Interface is limited interface; type Unit_Provider_Access is - access Unit_Provider_Interface'Class; + access all Unit_Provider_Interface'Class; type Unit_Provider_Access_Cst is access constant Unit_Provider_Interface'Class; ${ada_doc('langkit.unit_provider_type', 3)}
Convert README CI badge to GitHub Actions We don't use Travis CI any more.
@@ -14,7 +14,7 @@ QuTiP: Quantum Toolbox in Python [P. D. Nation](https://github.com/nonhermitian), and [J. R. Johansson](https://github.com/jrjohansson) -[![Build Status](https://img.shields.io/travis/qutip/qutip?logo=Travis)](https://travis-ci.org/qutip/qutip) +[![Build Status](https://github.com/qutip/qutip/actions/workflows/tests.yml/badge.svg?branch=master)](https://github.com/qutip/qutip/actions/workflows/tests.yml) [![Coverage Status](https://img.shields.io/coveralls/qutip/qutip.svg?logo=Coveralls)](https://coveralls.io/r/qutip/qutip) [![Maintainability](https://api.codeclimate.com/v1/badges/df502674f1dfa1f1b67a/maintainability)](https://codeclimate.com/github/qutip/qutip/maintainability) [![license](https://img.shields.io/badge/license-New%20BSD-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
BUG: remove setup/teardown Remove setup/teardown since these functions are defined at a different level.
@@ -117,20 +117,14 @@ def generate_instrument_list(package=None): class InstTestClass(): """Provides standardized tests for pysat instrument libraries. """ - def setup(self): - self.module_attrs = ['platform', 'name', 'tags', 'sat_ids', + module_attrs = ['platform', 'name', 'tags', 'sat_ids', 'load', 'list_files', 'download'] - self.inst_attrs = ['tag', 'sat_id', 'acknowledgements', 'references'] - self.inst_callable = ['load', 'list_files', 'download', 'clean', - 'default'] - self.attr_types = {'platform': str, 'name': str, 'tags': dict, + inst_attrs = ['tag', 'sat_id', 'acknowledgements', 'references'] + inst_callable = ['load', 'list_files', 'download', 'clean', 'default'] + attr_types = {'platform': str, 'name': str, 'tags': dict, 'sat_ids': dict, 'tag': str, 'sat_id': str, 'acknowledgements': str, 'references': str} - def teardown(self): - del self.inst_attrs, self.inst_callable, self.inst_types - del self.module_attrs - @pytest.mark.all_inst def test_modules_standard(self, name): """Checks that modules are importable and have standard properties.
Update stack with updated template
Using self.template describes the existing stack and returns the template already deployed in the stack, so the updated template would be overridden.
@@ -174,17 +174,18 @@ class ClusterStack(StackInfo): def _persist_stack_resources(self, keys): """Set the resources in template identified by keys to have a DeletionPolicy of 'Retain'.""" + template = self.template for key in keys: - self.template["Resources"][key]["DeletionPolicy"] = "Retain" + template["Resources"][key]["DeletionPolicy"] = "Retain" try: - self._update_template() + self._update_template(template) except AWSClientError as e: raise ClusterActionError(f"Unable to persist logs on cluster deletion, failed with error: {e}.") - def _update_template(self): - """Update template of the running stack according to self.template.""" + def _update_template(self, template): + """Update template of the running stack according to updated template.""" try: - AWSApi.instance().cfn.update_stack(self.name, self.template, self._params) + AWSApi.instance().cfn.update_stack(self.name, template, self._params) self._wait_for_update() except AWSClientError as e: if "no updates are to be performed" in str(e).lower():
Update README.md Small change
@@ -52,7 +52,7 @@ Please refer to the [full documentation](http://yeti-platform.readthedocs.io/en/ Yeti has a `docker-compose` script to get up and running even faster; this is useful for testing or even running production instances of Yeti should your infrastructure support it. Full instructions [here](https://github.com/yeti-platform/yeti/tree/master/extras/docker), but in a nutshell: $ git clone https://github.com/yeti-platform/yeti.git - $ cd yeti/extras/docker + $ cd yeti/extras/docker/dev $ docker-compose up ## Useful links
Simplified news publishing check + removed unnecessary Webhook check
Replaced the channel type check with `TextChannel.is_news()` to simplify checking whether a message can be published as news.
Removed the unnecessary `while` loop in `send_webhook` that checked whether the webhook was available; it is no longer needed after the start ordering modification.
@@ -116,7 +116,7 @@ class News(Cog): ) payload["data"]["pep"].append(msg.id) - if msg.channel.type is discord.ChannelType.news: + if msg.channel.is_news(): log.trace("Publishing PEP annnouncement because it was in a news channel") await msg.publish() @@ -170,7 +170,7 @@ class News(Cog): ) payload["data"][maillist].append(msg.id) - if msg.channel.type is discord.ChannelType.news: + if msg.channel.is_news(): log.trace("Publishing mailing list message because it was in a news channel") await msg.publish() @@ -223,10 +223,6 @@ class News(Cog): ) embed.set_footer(text=footer, icon_url=AVATAR_URL) - # Wait until Webhook is available - while not self.webhook: - pass - return await self.webhook.send( embed=embed, username=webhook_profile_name,
Move check for existing letter earlier in endpoint In response to: [^1]. [^1]:
@@ -169,6 +169,10 @@ def send_pdf_letter_notification(service_id, post_data): allow_guest_list_recipients=False, ) + # notification already exists e.g. if the user clicked send in different tabs + if get_notification_by_id(post_data['file_id']): + return {'id': str(post_data['file_id'])} + template = get_precompiled_letter_template(service.id) file_location = 'service-{}/{}.pdf'.format(service.id, post_data['file_id']) @@ -179,10 +183,6 @@ def send_pdf_letter_notification(service_id, post_data): post_data['file_id'], current_app.config['TRANSIENT_UPLOADED_LETTERS']) ) - # notification already exists e.g. if the user clicked send in different tabs - if get_notification_by_id(post_data['file_id']): - return {'id': str(post_data['file_id'])} - raise e # Getting the page count won't raise an error since admin has already checked the PDF is valid
add mat_slopescaledepthbias_decal 0.000001 to decals_off
A tradeoff, since it cannot be switched away from in game anymore.
@@ -490,8 +490,9 @@ alias characters_ultra "r_flex 1;flex_rules 1;anim_3wayblend 1;r_teeth 1;flex_sm //r_decal_overlap_area .8 // Allow for decals to overlap each other more //r_decal_overlap_count 0 // Do not allow decals to overlap each other //r_decal_overlap_count 3 // Allow for 3 decals to overlap each other +//mat_slopescaledepthbias_decal 0.000001 // Force hides decals, preventing a bug where 1 decal will still appear on 0 decals. Unfortunately, this cannot be reverted, because the default value is hardware specific. -alias decals_off "r_decals 0;mp_decals 0;r_decal_cover_count 0;r_decal_overlap_area 1;r_decal_overlap_count 0" +alias decals_off "r_decals 0;mp_decals 0;r_decal_cover_count 0;r_decal_overlap_area 1;r_decal_overlap_count 0;mat_slopescaledepthbias_decal 0.000001" alias decals_low "r_decals 9;mp_decals 9;r_decal_cover_count 1;r_decal_overlap_area .4;r_decal_overlap_count 0" alias decals_medium "r_decals 32;mp_decals 32;r_decal_cover_count 1;r_decal_overlap_area .4;r_decal_overlap_count 0" alias decals_high "r_decals 80;mp_decals 80;r_decal_cover_count 4;r_decal_overlap_area .8;r_decal_overlap_count 3"
Improve error handling
Throw an exception when a file is not found.
Print the name of the file that was not found.
@@ -86,7 +86,10 @@ def do_prepifg(gtiff_paths, params): log.info("Preparing interferograms by cropping/multilooking") parallel = params[cf.PARALLEL] - if all([os.path.isfile(f) for f in gtiff_paths]): + for f in gtiff_paths: + if not os.path.isfile(f): + raise Exception("Can not find geotiff: " + str(f) + ". Ensure you have converted your interferograms to geotiffs.") + ifgs = [prepifg_helper.dem_or_ifg(p) for p in gtiff_paths] xlooks, ylooks, crop = cf.transform_params(params) user_exts = (params[cf.IFG_XFIRST], params[cf.IFG_YFIRST], @@ -99,12 +102,7 @@ def do_prepifg(gtiff_paths, params): delayed(_prepifg_multiprocessing)(p, xlooks, ylooks, exts, thresh, crop, params) for p in gtiff_paths) else: - [_prepifg_multiprocessing(p, xlooks, ylooks, exts, thresh, crop, - params) for p in gtiff_paths] - else: - log.error("Full-res geotiffs do not exist. Ensure you have" - " converted your interferograms to geotiffs.") - sys.exit(1) + [_prepifg_multiprocessing(p, xlooks, ylooks, exts, thresh, crop, params) for p in gtiff_paths] def _prepifg_multiprocessing(path, xlooks, ylooks, exts, thresh, crop, params): """
Fix tests for apostrophe Previous commit didn't reflect all locations and had a copy/paste error.
@@ -1029,7 +1029,7 @@ def test_xmp_sidecar_gps(): xmlns:photoshop="http://ns.adobe.com/photoshop/1.0/"> <photoshop:SidecarForExtension>jpg</photoshop:SidecarForExtension> <dc:description></dc:description> - <dc:title>St. James's Park</dc:title> + <dc:title>St. James&#39;s Park</dc:title> <!-- keywords and persons listed in <dc:subject> as Photos does --> <dc:subject> <rdf:Seq> @@ -1038,7 +1038,7 @@ def test_xmp_sidecar_gps(): <rdf:li>London</rdf:li> <rdf:li>United Kingdom</rdf:li> <rdf:li>London 2018</rdf:li> - <rdf:li>St. James's Park</rdf:li> + <rdf:li>St. James&#39;s Park</rdf:li> </rdf:Seq> </dc:subject> <photoshop:DateCreated>2018-10-13T09:18:12.501000-04:00</photoshop:DateCreated> @@ -1055,7 +1055,7 @@ def test_xmp_sidecar_gps(): <rdf:li>London</rdf:li> <rdf:li>United Kingdom</rdf:li> <rdf:li>London 2018</rdf:li> - <dc:title>St. James&#39;s Park</dc:title> + <rdf:li>St. James&#39&#39;s Park</rdf:li> </rdf:Seq> </digiKam:TagsList> </rdf:Description>
MAINT: precompute log(2.0 * M_PI) in `random_loggam'
Most compilers should optimize it, but it doesn't hurt to inline it, and it has a better name now.
@@ -342,7 +342,7 @@ uint64_t random_uint(bitgen_t *bitgen_state) { * using logfactorial(k) instead. */ double random_loggam(double x) { - double x0, x2, xp, gl, gl0; + double x0, x2, lg2pi, gl, gl0; RAND_INT_TYPE k, n; static double a[10] = {8.333333333333333e-02, -2.777777777777778e-03, @@ -350,23 +350,25 @@ double random_loggam(double x) { 8.417508417508418e-04, -1.917526917526918e-03, 6.410256410256410e-03, -2.955065359477124e-02, 1.796443723688307e-01, -1.39243221690590e+00}; - x0 = x; - n = 0; + if ((x == 1.0) || (x == 2.0)) { return 0.0; - } else if (x <= 7.0) { + } else if (x < 7.0) { n = (RAND_INT_TYPE)(7 - x); - x0 = x + n; + } else { + n = 0; } - x2 = 1.0 / (x0 * x0); - xp = 2 * M_PI; + x0 = x + n; + x2 = (1.0 / x0) * (1.0 / x0); + /* log(2 * M_PI) */ + lg2pi = 1.8378770664093453e+00; gl0 = a[9]; for (k = 8; k >= 0; k--) { gl0 *= x2; gl0 += a[k]; } - gl = gl0 / x0 + 0.5 * log(xp) + (x0 - 0.5) * log(x0) - x0; - if (x <= 7.0) { + gl = gl0 / x0 + 0.5 * lg2pi + (x0 - 0.5) * log(x0) - x0; + if (x < 7.0) { for (k = 1; k <= n; k++) { gl -= log(x0 - 1.0); x0 -= 1.0;
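For reference, a sketch of the expansion `random_loggam` evaluates (the standard Stirling series for log-gamma; the coefficients a_k and the shift x0 = x + n are taken from the code above, and ln(2*pi) ≈ 1.8378770664093453 is the constant this commit precomputes):

```latex
% Stirling series evaluated after shifting the argument up so that x0 = x + n >= 7.
\log\Gamma(x_0) \approx \left(x_0 - \tfrac{1}{2}\right)\ln x_0 - x_0
    + \tfrac{1}{2}\ln(2\pi) + \sum_{k=1}^{10} \frac{a_k}{x_0^{\,2k-1}},
\qquad
\log\Gamma(x) = \log\Gamma(x_0) - \sum_{j=1}^{n} \ln(x_0 - j).
```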
fix(Query Report): Abort last ajax request in refresh
If there are two ajax requests and the 1st one takes longer than the 2nd, its data overrides the report later.
@@ -235,7 +235,13 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList { filters = Object.assign(filters || {}, obj); } - return new Promise(resolve => frappe.call({ + // only one refresh at a time + if (this.last_ajax) { + this.last_ajax.abort(); + } + + return new Promise(resolve => { + this.last_ajax = frappe.call({ method: 'frappe.desk.query_report.run', type: 'GET', args: { @@ -243,7 +249,8 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList { filters: filters, }, callback: resolve - })).then(r => { + }) + }).then(r => { let data = r.message; this.hide_status();
fix typo in _ppg_clean_nabian2018 docs assuming it's ok to directly push to dev for small typos!
@@ -117,7 +117,7 @@ def _ppg_clean_elgendi(ppg_signal, sampling_rate): def _ppg_clean_nabian2018(ppg_signal, sampling_rate, heart_rate=None): - """Low-pass filter for continuous BP signal preprocessing, adaopted from Nabian et al. (2018).""" + """Low-pass filter for continuous BP signal preprocessing, adapted from Nabian et al. (2018).""" # Determine low-pass filter value highcut = 40
Make most ignore patterns more selective Specifically, match them only in the current (project root) directory.
-*.egg-info +/*.egg-info *.pyc -.coverage +/.coverage .DS_Store -build/ -coverage.xml -dist/ -htmlcov/ -venv/ -_build/ -_static/ -_templates/ -_test.py +/build/ +/coverage.xml +/dist/ +/htmlcov/ +/venv/ +/_build/ +/_static/ +/_templates/ +/_test.py
Update appshell_extensions.js Updating comments
@@ -37,6 +37,8 @@ if (!appshell.fs) { if (!appshell.app) { appshell.app = {}; } + +// Alias the appshell object to brackets. This is temporary and should be removed. if (!brackets) { brackets = appshell; }
Fix BI API HG-- branch : feature/microservices
@@ -337,7 +337,7 @@ class BIAPI(API): "username": u.username, "full_name": "%s %s" % (u.last_name, u.first_name) } for u in qs), - key=lambda u: u["name"]) + key=lambda u: u["username"]) @executor("query") @api @@ -363,10 +363,10 @@ class BIAPI(API): if ar.user: i["user"] = { "id": ar.user.id, - "name": "%s %s" % (u.last_name, u.first_name) + "name": "%s %s" % (ar.user.last_name, ar.user.first_name) } if ar.group: - i["user"] = { + i["group"] = { "id": ar.group.id, "name": ar.group.name }
2.8.3 Automatically generated by python-semantic-release
@@ -9,7 +9,7 @@ https://community.home-assistant.io/t/echo-devices-alexa-as-media-player-testers """ from datetime import timedelta -__version__ = "2.8.2" +__version__ = "2.8.3" PROJECT_URL = "https://github.com/custom-components/alexa_media_player/" ISSUE_URL = "{}issues".format(PROJECT_URL)
Require xopen 0.7.3 (for the PipedGzipReader speedups) Close
@@ -102,7 +102,7 @@ setup( package_dir={'': 'src'}, packages=find_packages('src'), entry_points={'console_scripts': ['cutadapt = cutadapt.__main__:main']}, - install_requires=['dnaio>=0.3', 'xopen>=0.5.0'], + install_requires=['dnaio>=0.3', 'xopen>=0.7.3'], extras_require={ 'dev': ['Cython', 'pytest', 'pytest-timeout', 'sphinx', 'sphinx_issues'], },
Log statistics about restart/omit trials
As discussed, logging these statistics has two benefits:
1. It helps us confirm that all missing data are intended.
2. It gives us the statistics needed to find a better solution to missing data.
@@ -219,6 +219,8 @@ class TrialInstanceManager: # pylint: disable=too-many-instance-attributes def __init__(self, num_trials, experiment_config): self.experiment_config = experiment_config self.num_trials = num_trials + self.num_preemptible_restarts = 0 + self.num_preemptible_omits = 0 # Bound for the number of nonpreemptibles we can start if the experiment # specified preemptible_runners. @@ -334,6 +336,20 @@ class TrialInstanceManager: # pylint: disable=too-many-instance-attributes return get_started_trials(self.experiment_config['experiment']).filter( models.Trial.preemptible.is_(False)).count() + def _format_count_info(self, trial: models.Trial, count: int) -> str: + """Formats a trial's count and information for logging.""" + return (f'Trial ID: {trial.id}. ' + f'Benchmark-Fuzzer pair: {trial.benchmark}-{trial.fuzzer}. ' + f'Accumulating to {count/self.num_trials*100:3.2f}% ' + f'({count} / {self.num_trials}) of all trials.') + + def _log_restart(self, preemptible: bool, trial: models.Trial, + count: int) -> None: + """Logs the count of restarting trials.""" + logs.info('Restarting a preemptible trial as a %s one: %s', + 'preemptible' if preemptible else 'nonpreemptible', + self._format_count_info(trial, count)) + def _get_preempted_replacements(self, preempted_trials) -> List[models.Trial]: """Returns a list containing a replacement trial for each trial that can @@ -353,7 +369,10 @@ class TrialInstanceManager: # pylint: disable=too-many-instance-attributes # trying nonpreemptible to minimize cost. if self.can_start_preemptible(): # See if we can replace with a preemptible. + self.num_preemptible_restarts += 1 replacements.append(replace_trial(trial, preemptible=True)) + + self._log_restart(True, trial, self.num_preemptible_restarts) continue if self.can_start_nonpreemptible(nonpreemptible_starts): @@ -361,8 +380,15 @@ class TrialInstanceManager: # pylint: disable=too-many-instance-attributes # replace it with a nonpreemptible. nonpreemptible_starts += 1 replacements.append(replace_trial(trial, preemptible=False)) + + self._log_restart(False, trial, nonpreemptible_starts) continue + self.num_preemptible_omits += 1 + logs.warning( + 'Omitting a trial to cap cost: %s', + self._format_count_info(trial, self.num_preemptible_omits)) + return replacements def _get_started_unfinished_instances(self) -> Dict[str, models.Trial]:
Add gshortcut to MIME type choices to be consistent Add third party MIME type shortcut
@@ -500,6 +500,7 @@ MIMETYPE_GA_SCRIPT = f'{APPLICATION_VND_GOOGLE_APPS}script' MIMETYPE_GA_SITES = f'{APPLICATION_VND_GOOGLE_APPS}sites' MIMETYPE_GA_SPREADSHEET = f'{APPLICATION_VND_GOOGLE_APPS}spreadsheet' MIMETYPE_GA_SHORTCUT = f'{APPLICATION_VND_GOOGLE_APPS}shortcut' +MIMETYPE_GA_3P_SHORTCUT = f'{APPLICATION_VND_GOOGLE_APPS}drive-sdk' MIMETYPE_CHOICES_MAP = { 'gdoc': MIMETYPE_GA_DOCUMENT, @@ -511,6 +512,8 @@ MIMETYPE_CHOICES_MAP = { 'gfusion': MIMETYPE_GA_FUSIONTABLE, 'gpresentation': MIMETYPE_GA_PRESENTATION, 'gscript': MIMETYPE_GA_SCRIPT, + 'gshortcut': MIMETYPE_GA_SHORTCUT, + 'g3pshortcut': MIMETYPE_GA_3P_SHORTCUT, 'gsite': MIMETYPE_GA_SITES, 'gsheet': MIMETYPE_GA_SPREADSHEET, 'gspreadsheet': MIMETYPE_GA_SPREADSHEET,
[easy] Add blank line to docstring A small help for the poor docstring parser in its ungrateful job... (rendered docs look mangled)
class IntermediateStorageDefinition(IConfigMappable): """Defines intermediate data storage behaviors. + Args: name (str): Name of the storage mode. is_persistent (bool): Whether the storage is persistent in a way that can cross process/node
Add e3-core as a dependency for Langkit TN:
@@ -21,7 +21,8 @@ setup( author_email='[email protected]', url='https://www.adacore.com', description='A Python framework to generate language parsers', - install_requires=['Mako', 'PyYAML', 'enum', 'enum34', 'funcy', 'docutils'], + install_requires=['Mako', 'PyYAML', 'enum', 'enum34', 'funcy', 'docutils', + 'e3-core'], packages=['langkit', 'langkit.expressions', 'langkit.gdb',
Signal error on connection error instead of asserting Summary: No need to assert on connection errors.
@@ -485,7 +485,10 @@ void Pair::handleConnecting() { // Verify that connecting was successful rv = getsockopt(fd_, SOL_SOCKET, SO_ERROR, &optval, &optlen); GLOO_ENFORCE_NE(rv, -1); - GLOO_ENFORCE_EQ(optval, 0, "SO_ERROR: ", strerror(optval)); + if (optval != 0) { + signalIoFailure( + GLOO_ERROR_MSG("connect ", peer_.str(), ": ", strerror(optval))); + } // Common connection-made code handleConnected();
Add Microsoft Azure Link Add link to documentation for setting up a hail-capable cluster on Microsoft Azure HDInsight
@@ -10,6 +10,14 @@ While Hail does not have any built-in tools for working with tool <https://github.com/hms-dbmi/hail-on-AWS-spot-instances>`__ developed by Carlos De Niz with the `Avillach Lab <https://avillach-lab.hms.harvard.edu/>`_ at Harvard Medical School +Microsoft Azure +--------------- + +The step by step, latest process documentation for creating a hail-capable cluster in +Azure, utilizing an HDInsight Spark Cluster can be found +`here <https://github.com/TheEagleByte/azure-hail>`__ compiled by Garrett Bromley with +`E360 Genomics at IQVIA. <https://www.iqvia.com/solutions/real-world-evidence/platforms/e360-real-world-data-platform>`__ + Others ------