Columns (name: type, min-max length):
hash: string, 40-40
diff: string, 131-26.7k
message: string, 7-694
project: string, 5-67
split: string, 1 distinct value
diff_languages: string, 2-24
ad1d62b921e15222cd81f6b305e8417b3b7ebbcb
diff --git a/lib/apivore/rspec_builder.rb b/lib/apivore/rspec_builder.rb index <HASH>..<HASH> 100644 --- a/lib/apivore/rspec_builder.rb +++ b/lib/apivore/rspec_builder.rb @@ -1,6 +1,7 @@ require 'apivore/rspec_matchers' require 'action_controller' require 'action_dispatch' +require 'rspec/mocks' require 'hashie' module Apivore @@ -8,7 +9,7 @@ module Apivore include Apivore::RspecMatchers include ActionDispatch::Integration include RSpec::Mocks::ExampleMethods - + @@setups ||= {} def apivore_setup(path, method, response, &block)
Add require 'rspec/mocks' to fix crash.
westfieldlabs_apivore
train
rb
2452dfec70105cc132af58bc946ef30f6bb37168
diff --git a/scripts/generate/js_api.js b/scripts/generate/js_api.js index <HASH>..<HASH> 100644 --- a/scripts/generate/js_api.js +++ b/scripts/generate/js_api.js @@ -51,11 +51,15 @@ module.exports = function (branch, done) { function readSpecFiles(done) { var apiDir = path.join(esDir, 'rest-api-spec/api/'); - files = fs.readdirSync(apiDir).map(function (filename) { - var module = require(apiDir + filename); - delete require.cache[apiDir + filename]; - return module; - }); + files = fs.readdirSync(apiDir) + .filter(function (filename) { + return filename[0] !== '_' + }) + .map(function (filename) { + var module = require(apiDir + filename); + delete require.cache[apiDir + filename]; + return module; + }); done(); }
[api/generate] ignore spec files that start with an underscore
elastic_elasticsearch-js
train
js
0de444f1ec668da21f8755458a50b041632e62a3
diff --git a/perceval/_version.py b/perceval/_version.py index <HASH>..<HASH> 100644 --- a/perceval/_version.py +++ b/perceval/_version.py @@ -1,2 +1,2 @@ # Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440 -__version__ = "0.5.0.dev5" +__version__ = "0.5.0.dev6"
Update version number to '<I>.dev6'
chaoss_grimoirelab-perceval
train
py
2700a35d4e3acf2bf73c59f2032e325b229053e9
diff --git a/leetcode/views/result.py b/leetcode/views/result.py index <HASH>..<HASH> 100644 --- a/leetcode/views/result.py +++ b/leetcode/views/result.py @@ -82,6 +82,10 @@ class ResultView(urwid.Frame): blank, your_answer_header, your_answer, blank, expected_answer_header, expected_answer ] + if len(self.result.get('std_output', '')) > 0: + stdout_header = urwid.Text('Stdout:') + stdout = urwid.Text(self.result['std_output']) + list_items.extend([blank, stdout_header, stdout]) return urwid.Padding(urwid.ListBox(urwid.SimpleListWalker(list_items)), left=2, right=2) def make_compile_error_view(self):
Print std_output if it's not empty
chishui_terminal-leetcode
train
py
c5fed95b819c683b5aa576f465cfbf8e90d937ba
diff --git a/alerta/app/__init__.py b/alerta/app/__init__.py index <HASH>..<HASH> 100644 --- a/alerta/app/__init__.py +++ b/alerta/app/__init__.py @@ -14,6 +14,9 @@ app.config.from_object('alerta.settings') app.config.from_pyfile('/etc/alertad.conf', silent=True) app.config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True) +if 'BASE_URL' in os.environ: + app.config['BASE_URL'] = os.environ['BASE_URL'] + if 'SECRET_KEY' in os.environ: app.config['SECRET_KEY'] = os.environ['SECRET_KEY']
Add BASE_URL as env var setting
alerta_alerta
train
py
2d0e409ac4f48b73ae5555977f6cc21e91948df7
diff --git a/js/h5p.js b/js/h5p.js index <HASH>..<HASH> 100644 --- a/js/h5p.js +++ b/js/h5p.js @@ -1948,7 +1948,7 @@ H5P.createTitle = function (rawTitle, maxLength) { if (H5PIntegration.saveFreq !== false) { // Store the current state of the H5P when leaving the page. - H5P.$window.on('beforeunload', function () { + var storeCurrentState = function () { for (var i = 0; i < H5P.instances.length; i++) { var instance = H5P.instances[i]; if (instance.getCurrentState instanceof Function || @@ -1960,7 +1960,9 @@ H5P.createTitle = function (rawTitle, maxLength) { } } } - }); + }; + H5P.$window.one('beforeunload unload', storeCurrentState); + H5P.$window.on('pagehide', storeCurrentState); } /**
Made content state being saved on safari@iPad when closing tab and leaving tab
h5p_h5p-php-library
train
js
dde69d82b39feb853b9aa93976d312b9daf1c0c2
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index <HASH>..<HASH> 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -156,7 +156,7 @@ class Dashboard( # pylint: disable=too-many-instance-attributes ] def __repr__(self) -> str: - return f"Dashboard<{self.slug or self.id}>" + return f"Dashboard<{self.id or self.slug}>" @property def table_names(self) -> str: @@ -253,7 +253,7 @@ class Dashboard( # pylint: disable=too-many-instance-attributes @cache.memoize( # manage cache version manually - make_name=lambda fname: f"{fname}-v2", + make_name=lambda fname: f"{fname}-v2.1", timeout=config["DASHBOARD_CACHE_TIMEOUT"], unless=lambda: not is_feature_enabled("DASHBOARD_CACHE"), )
fix: use dashboard id for stable cache key (#<I>)
apache_incubator-superset
train
py
d2da491e971c8122b34a1ecaede842bc270622cd
diff --git a/bin/geo2topo.js b/bin/geo2topo.js index <HASH>..<HASH> 100644 --- a/bin/geo2topo.js +++ b/bin/geo2topo.js @@ -110,9 +110,22 @@ function formatProperties(collection, v) { if(id !== '-99') { feature.id = id; feature.properties.ct = getCentroid(feature); + continue; } } + // Unfortunately, we need this to include Norway (IS0_A3=NOR) + // from Natural Earth v4.1.0 + // - https://github.com/nvkelso/natural-earth-vector/issues/252 + if(v.ids && v.ids.indexOf('ISO_A3') === 0) { + id = feature.properties['SOV_A3']; + + if(id !== '-99') { + feature.id = id; + feature.properties.ct = getCentroid(feature); + } + } + // France (IS0_A3=FRA) is also acting weird using IS0_A3, // but using ISO_A3_EH seems to work ok // - https://github.com/nvkelso/natural-earth-vector/issues/284
take special care of Norway ... by using SOV_A3 as backup for ISO_A3 until Natural Earth issue is fixed
etpinard_sane-topojson
train
js
ef42f0c6ebd9dac48e2c4671fc4ea72026e22ee6
diff --git a/lib/xcode/registry.rb b/lib/xcode/registry.rb index <HASH>..<HASH> 100644 --- a/lib/xcode/registry.rb +++ b/lib/xcode/registry.rb @@ -71,6 +71,7 @@ module Xcode 'PBXFileReference' => FileReference, 'PBXGroup' => Group, 'PBXNativeTarget' => [Target, ConfigurationOwner], + 'PBXLegacyTarget' => [Target, ConfigurationOwner], 'PBXAggregateTarget' => Target, 'PBXFrameworksBuildPhase' => BuildPhase, 'PBXSourcesBuildPhase' => BuildPhase,
Added PBXLegacyTarget
rayh_xcoder
train
rb
1f4abd3e7678904176f9284f13a45c147d20f1bd
diff --git a/lib/firehose.rb b/lib/firehose.rb index <HASH>..<HASH> 100644 --- a/lib/firehose.rb +++ b/lib/firehose.rb @@ -17,4 +17,7 @@ module Firehose autoload :Server, 'firehose/server' autoload :Channel, 'firehose/channel' autoload :SwfPolicyRequest, 'firehose/swf_policy_request' -end \ No newline at end of file +end + +# Detect if Sprockets is loaded. If it is, lets configure Firehose to use it! +Firehose::Assets::Sprockets.auto_detect_configuration \ No newline at end of file diff --git a/lib/firehose/assets.rb b/lib/firehose/assets.rb index <HASH>..<HASH> 100644 --- a/lib/firehose/assets.rb +++ b/lib/firehose/assets.rb @@ -12,6 +12,12 @@ module Firehose env.append_path Assets.path('javascripts') env end + + def self.auto_detect_configuration + if defined? ::Sprockets + Firehose::Assets::Sprockets.configure + end + end end end end \ No newline at end of file
Moved autoload to root firehose.rb file so that assets would actually load if Sprockets is present
firehoseio_firehose
train
rb,rb
7fee2aeea3c8f79c1130fea71555fc1031f74ab9
diff --git a/code/model/Subsite.php b/code/model/Subsite.php index <HASH>..<HASH> 100644 --- a/code/model/Subsite.php +++ b/code/model/Subsite.php @@ -83,7 +83,7 @@ class Subsite extends DataObject implements PermissionProvider { * Corresponds to subfolder names within the /themes folder. By default, all themes contained in this folder * are listed. */ - protected static $allowed_themes = array(); + private static $allowed_themes = array(); /** * @var Boolean If set to TRUE, don't assume 'www.example.com' and 'example.com' are the same.
Defining static as private for <I>+
silverstripe_silverstripe-subsites
train
php
3be95e862299d671ad5263580b8ba971b680fc34
diff --git a/lib/gruff/base.rb b/lib/gruff/base.rb index <HASH>..<HASH> 100644 --- a/lib/gruff/base.rb +++ b/lib/gruff/base.rb @@ -417,9 +417,11 @@ module Gruff end # Return the graph as a rendered binary blob. - def to_blob(file_format = 'PNG') + # + # @param image_format [String] The image format of binary blob. + def to_blob(image_format = 'PNG') draw - Gruff::Renderer.to_blob(file_format) + Gruff::Renderer.to_blob(image_format) end protected
Doc: Add description in to_blob
topfunky_gruff
train
rb
b5a5465c1e2d4a8862a8a09c5565d2cb09c431d3
diff --git a/lib/rest-ftp-daemon/location.rb b/lib/rest-ftp-daemon/location.rb index <HASH>..<HASH> 100644 --- a/lib/rest-ftp-daemon/location.rb +++ b/lib/rest-ftp-daemon/location.rb @@ -49,8 +49,8 @@ module RestFtpDaemon # Build URI from parameters build_uri url + # Specific initializations case @uri - when URI::FILE then init_file when URI::S3 then init_aws # Match AWS URL with BUCKET.s3.amazonaws.com end @@ -147,9 +147,6 @@ module RestFtpDaemon end - def init_file - # Dir is absolute - @dir = File.join('/', @dir.to_s) end def init_aws @@ -166,9 +163,6 @@ module RestFtpDaemon # Credentials from config @aws_id = Conf.at(:credentials, @uri.host, :id) @aws_secret = Conf.at(:credentials, @uri.host, :secret) - - # Clear @dir - @dir = nil end def extract_filename path
location: update init_* methods
bmedici_rest-ftp-daemon
train
rb
2b7e21b31db7cd843a72e32b04ba90402f8f9b66
diff --git a/src/commands/alias/assign-alias.js b/src/commands/alias/assign-alias.js index <HASH>..<HASH> 100644 --- a/src/commands/alias/assign-alias.js +++ b/src/commands/alias/assign-alias.js @@ -29,7 +29,7 @@ async function assignAlias( let externalDomain = false; // If there was a previous deployment, we should fetch it to scale and downscale later - const prevDeployment = await fetchDeploymentFromAlias( + let prevDeployment = await fetchDeploymentFromAlias( output, now, contextName, @@ -37,9 +37,14 @@ async function assignAlias( deployment ); + // If there is an alias laying around that points to a deleted + // deployment, we need to account for it here. + if (prevDeployment instanceof Errors.DeploymentNotFound) { + prevDeployment = null; + } + if ( - prevDeployment instanceof Errors.DeploymentPermissionDenied || - prevDeployment instanceof Errors.DeploymentNotFound + prevDeployment instanceof Errors.DeploymentPermissionDenied ) { return prevDeployment; }
Allow re-using aliases that point to deleted deployments (#<I>) * Allow re-using aliases that point to deleted deployments * Removed
zeit_now-cli
train
js
2597d7312894a11eb988e39c2ff8111a41e12b0a
diff --git a/lang/en_utf8/assignment.php b/lang/en_utf8/assignment.php index <HASH>..<HASH> 100644 --- a/lang/en_utf8/assignment.php +++ b/lang/en_utf8/assignment.php @@ -69,7 +69,7 @@ $string['helpupload'] = '<p>This type of assignment allows each participant to u can be used to give each participant different file to work with.</p> <p>Participants may also enter notes describing the submitted files, progress status or any other text information.</p> <p>Submission of this type of assignment must be manually finalized by the participant. You can review the current status - at any time, unfinished assignments are marked af Draft. You can revert any ungraded assignment back to draft status.</p>'; + at any time, unfinished assignments are marked as Draft. You can revert any ungraded assignment back to draft status.</p>'; $string['helpuploadsingle'] = '<p>This type of assignment allows each participant to upload a single file, of any type.</p> <p>This might be a Word processor document, an image, a zipped web site, or anything you ask them to submit.</p>';
a small typo 'helpupload' on /lang/en_utf8/assignment.php MDL-<I>, found by Mitsuhiro Yoshida
moodle_moodle
train
php
f3a2cbe0eba574d71a8637b63111386e489fec0e
diff --git a/pendulum/pendulum.py b/pendulum/pendulum.py index <HASH>..<HASH> 100644 --- a/pendulum/pendulum.py +++ b/pendulum/pendulum.py @@ -273,11 +273,11 @@ class Pendulum(Date, datetime.datetime): return test_instance if tz is None or tz == 'local': - dt = datetime.datetime.now() + dt = datetime.datetime.now(cls._local_timezone()) elif tz is UTC or tz == 'UTC': - dt = datetime.datetime.utcnow().replace(tzinfo=UTC) + dt = datetime.datetime.now(UTC) else: - dt = datetime.datetime.utcnow().replace(tzinfo=UTC) + dt = datetime.datetime.now(UTC) tz = cls._safe_create_datetime_zone(tz) dt = tz.convert(dt) @@ -358,7 +358,7 @@ class Pendulum(Date, datetime.datetime): if cls.has_test_now(): now = cls.get_test_now().in_tz(tz) else: - now = datetime.datetime.utcnow().replace(tzinfo=UTC) + now = datetime.datetime.now(UTC) now = tz.convert(now) if year is None:
Improve Pendulum.now and Pendulum.utcnow performance (#<I>)
sdispater_pendulum
train
py
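A stdlib-only sketch of the change above (illustrative, not the pendulum internals): datetime.now(tz) yields a timezone-aware value directly, avoiding the extra naive-datetime-plus-replace step.

```python
from datetime import datetime, timezone

aware_new = datetime.now(timezone.utc)                      # single call, aware result
aware_old = datetime.utcnow().replace(tzinfo=timezone.utc)  # replaced pattern

assert aware_new.tzinfo is timezone.utc and aware_old.tzinfo is timezone.utc
```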
54f39b21df5953d119bcbc97162842982ff26c42
diff --git a/openquake/commonlib/logictree.py b/openquake/commonlib/logictree.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/logictree.py +++ b/openquake/commonlib/logictree.py @@ -1190,7 +1190,7 @@ class GsimLogicTree(object): def get_num_branches(self): """ - Return the number of realizations for branchset id, as a dictionary. + Return the number of branches for branchset id, as a dictionary. """ num = {} for branchset, branches in itertools.groupby( @@ -1200,10 +1200,9 @@ class GsimLogicTree(object): def get_num_paths(self): """ - Return the total number of paths for full enumeration - without doing a full enumeration. + Return the total number of paths in the tree. """ - # NB: the algorithm assume a symmetric logic tree for the GSIMs + # NB: the algorithm assume a symmetric logic tree for the GSIMs; # in the future we may relax such assumption num = 1 for val in self.get_num_branches().itervalues():
Fixed a docstring and a comment
gem_oq-engine
train
py
c55ef075e384dfc0715b1c3d1aee124f4a8eb421
diff --git a/src/backbone-validation.js b/src/backbone-validation.js index <HASH>..<HASH> 100644 --- a/src/backbone-validation.js +++ b/src/backbone-validation.js @@ -482,12 +482,14 @@ Backbone.Validation = (function(_){ var defaultAttributeLoaders = Validation.attributeLoaders = { inputNames: function (view) { var attrs = []; - view.$('form [name]').each(function () { - if (/^(?:input|select|textarea)$/i.test(this.nodeName) && this.name && - this.type !== 'submit' && attrs.indexOf(this.name) === -1) { - attrs.push(this.name); - } - }); + if (view) { + view.$('form [name]').each(function () { + if (/^(?:input|select|textarea)$/i.test(this.nodeName) && this.name && + this.type !== 'submit' && attrs.indexOf(this.name) === -1) { + attrs.push(this.name); + } + }); + } return attrs; } };
Check if view is assigned in inputNames
thedersen_backbone.validation
train
js
ae375cdcae0e3a1101582a7d4d0082633ab60e2c
diff --git a/lib/rails-api/application.rb b/lib/rails-api/application.rb index <HASH>..<HASH> 100644 --- a/lib/rails-api/application.rb +++ b/lib/rails-api/application.rb @@ -14,15 +14,6 @@ module Rails private - def ssl_module - if defined? ::ActionDispatch::SSL - ::ActionDispatch::SSL - else - require 'rack/ssl' - ::Rack::SSL - end - end - def setup_generators! generators = config.generators @@ -127,7 +118,8 @@ module Rails end if config.force_ssl - middleware.use ssl_module, config.ssl_options + require "rack/ssl" + middleware.use ::Rack::SSL, config.ssl_options end if config.action_dispatch.x_sendfile_header.present? @@ -173,6 +165,5 @@ module Rails end end end - end end
Remove ssl_module compatibility method Now that we have separate middleware stacks, we don't need this method to handle the differences anymore.
rails-api_rails-api
train
rb
367599909ca268c412298bd80fcd283b516b8dd8
diff --git a/anyconfig/inputs.py b/anyconfig/inputs.py index <HASH>..<HASH> 100644 --- a/anyconfig/inputs.py +++ b/anyconfig/inputs.py @@ -28,14 +28,14 @@ def is_input_obj(obj): """ :return: True if given something `obj` is a 'Input' namedtuple object. - >>> assert is_input_obj(1) == False - >>> assert is_input_obj("aaa") == False - >>> assert is_input_obj({}) == False + >>> assert not is_input_obj(1) + >>> assert not is_input_obj("aaa") + >>> assert not is_input_obj({}) + >>> assert not is_input_obj(('a', 1, {})) >>> inp = Input("/etc/hosts", PATH_STR, "/etc/hosts", None, open) >>> assert is_input_obj(inp) """ if isinstance(obj, tuple) and getattr(obj, "_asdict", False): - # I don't think there is another way to do that. return all(k in obj._asdict() for k in _INPUT_KEYS) return False
refactor: simplify doctest cases and add one more to .inputs.is_input_obj
ssato_python-anyconfig
train
py
96074950892da36a0cf8397f49a92e55295f347c
diff --git a/lib/migration.js b/lib/migration.js index <HASH>..<HASH> 100644 --- a/lib/migration.js +++ b/lib/migration.js @@ -6,9 +6,9 @@ var redefine = require('redefine'); module.exports = redefine.Class({ constructor: function(path) { - this.file = _path.basename(path); - this.path = path; - }, + this.path = _path.resolve(path); + this.file = _path.basename(this.path); + }, migration: function () { return require(this.path);
Resolve the passed path Resolve the passed path in order to get rid of relative paths.
sequelize_umzug
train
js
d39f9436044eb0ac34588559127149fc0e15bcb8
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -17,6 +17,7 @@ setup( dependency_links = [ 'http://sourceforge.net/projects/gnuplot-py/files/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz/download' ], + setup_requires = [ 'numpy==1.9.2' ], install_requires = [ 'numpy==1.9.2', 'Pint==0.5.1', 'PyModelFit==0.1.2', 'uncertainties==2.4.4'
setup: try setup_requires
tschaume_ccsgp_get_started
train
py
cd3fe7b435f8aa587238a81db780bbe1c3adffe2
diff --git a/bcbio/variation/coverage.py b/bcbio/variation/coverage.py index <HASH>..<HASH> 100644 --- a/bcbio/variation/coverage.py +++ b/bcbio/variation/coverage.py @@ -114,7 +114,8 @@ def incomplete_regions(chanjo_db, batch_name, out_dir): return out_file conn = sqlite3.connect(chanjo_db) c = conn.cursor() - q = c.execute("SELECT contig, start, end, strand, coverage, completeness " + q = c.execute("SELECT contig, start, end, strand, coverage, completeness, " + "sample_id " "FROM interval_data " "JOIN interval ON interval_data.parent_id=interval.id " "WHERE coverage < %d OR " @@ -124,7 +125,7 @@ def incomplete_regions(chanjo_db, batch_name, out_dir): with open(tx_out_file + ".tmp", "w") as out_handle: for line in q: line = [str(x) for x in line] - out_handle.write("\t".join([line[0], line[1], line[2], + out_handle.write("\t".join([line[0], line[1], line[2], line[6], line[3], line[4], line[5]]) + "\n") bt = BedTool(tx_out_file + ".tmp").sort().bgzip() shutil.move(bt, tx_out_file)
Add sample id to the batched missing coverage BED file.
bcbio_bcbio-nextgen
train
py
01c9a575180f4db7a7c627db02d6c834213a2a5d
diff --git a/src/Swoole/Request.php b/src/Swoole/Request.php index <HASH>..<HASH> 100644 --- a/src/Swoole/Request.php +++ b/src/Swoole/Request.php @@ -57,6 +57,12 @@ class Request $_SERVER['HTTPS'] = 'on'; } + // Fix REQUEST_URI with QUERY_STRING + if (isset($_SERVER['QUERY_STRING']) && strlen($_SERVER['QUERY_STRING']) > 0) { + $_SERVER['REQUEST_URI'] .= strpos($_SERVER['REQUEST_URI'], '?') === false ? '?' : ''; + $_SERVER['REQUEST_URI'] .= $_SERVER['QUERY_STRING']; + } + // Fix argv & argc if (!isset($_SERVER['argv'])) { $_SERVER['argv'] = isset($GLOBALS['argv']) ? $GLOBALS['argv'] : [];
fix REQUEST_URI with QUERY_STRING
hhxsv5_laravel-s
train
php
b514bd4841fbb69ad6da629ae26a016af1a886ef
diff --git a/tests/unit/components/sortable-group-test.js b/tests/unit/components/sortable-group-test.js index <HASH>..<HASH> 100644 --- a/tests/unit/components/sortable-group-test.js +++ b/tests/unit/components/sortable-group-test.js @@ -98,3 +98,31 @@ test('update', function(assert) { assert.deepEqual(items, expected, 'expected y positions to be applied to all but isDragging'); }); + +test('commit', function(assert) { + let items = [{ + y: 20, + model: 'bar' + }, { + y: 30, + model: 'baz' + }, { + y: 10, + model: 'foo' + }]; + let targetObject = Ember.Object.create({ + reorder(newOrder) { + this.newOrder = newOrder; + } + }); + let component = this.subject({ + items, + targetObject, + onChange: 'reorder' + }); + + component.commit(); + + assert.deepEqual(targetObject.newOrder, ['foo', 'bar', 'baz'], + 'expected target to receive models in order'); +});
Add unit test for sortable-group#commit
heroku_ember-sortable
train
js
f65337d4e96e72955b95fada2ca394a435b6cc82
diff --git a/core-bundle/src/EventListener/StoreRefererListener.php b/core-bundle/src/EventListener/StoreRefererListener.php index <HASH>..<HASH> 100644 --- a/core-bundle/src/EventListener/StoreRefererListener.php +++ b/core-bundle/src/EventListener/StoreRefererListener.php @@ -119,7 +119,7 @@ class StoreRefererListener } /** - * @return array<string,array<string,string> + * @return array<string,array<string,string>> */ private function prepareBackendReferer(string $refererId, array $referers = null): array {
[Core] Fix a missing bracket in the phpDoc (see #<I>) Description ----------- - Commits ------- <I>d<I>d Fix phpdoc (missing bracket)
contao_contao
train
php
051ae41f2c59393008dd2bc343886baa2ae752ea
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -305,7 +305,15 @@ Game.prototype.removeFarChunks = function(playerPosition) { }) Object.keys(self.voxels.chunks).map(function(chunkIndex) { if (nearbyChunks.indexOf(chunkIndex) > -1) return - self.scene.remove(self.voxels.meshes[chunkIndex][self.meshType]) + var chunk = self.voxels.meshes[chunkIndex] + + self.scene.remove(chunk[self.meshType]) + chunk[self.meshType].geometry.dispose() + + delete chunk.data + delete chunk.geometry + delete chunk.meshed + delete chunk.surfaceMesh delete self.voxels.chunks[chunkIndex] }) self.voxels.requestMissingChunks(playerPosition)
Clean up voxel chunks more thoroughly. Use mesh.geometry.dispose() to clean up in-engine references and delete the remaining references on the voxel Mesh before removing completely from the index.
maxogden_voxel-engine
train
js
da75b02c8fbe1c92ea7fe972893ae07a3782d395
diff --git a/core/src/main/java/io/grpc/HandlerRegistry.java b/core/src/main/java/io/grpc/HandlerRegistry.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/io/grpc/HandlerRegistry.java +++ b/core/src/main/java/io/grpc/HandlerRegistry.java @@ -25,7 +25,6 @@ import javax.annotation.concurrent.ThreadSafe; * Registry of services and their methods used by servers to dispatching incoming calls. */ @ThreadSafe -@ExperimentalApi("https://github.com/grpc/grpc-java/issues/933") public abstract class HandlerRegistry { /** diff --git a/core/src/main/java/io/grpc/ServerBuilder.java b/core/src/main/java/io/grpc/ServerBuilder.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/io/grpc/ServerBuilder.java +++ b/core/src/main/java/io/grpc/ServerBuilder.java @@ -136,7 +136,6 @@ public abstract class ServerBuilder<T extends ServerBuilder<T>> { * @return this * @since 1.0.0 */ - @ExperimentalApi("https://github.com/grpc/grpc-java/issues/933") public abstract T fallbackHandlerRegistry(@Nullable HandlerRegistry fallbackRegistry); /**
core: Stabilize HandlerRegistry MutableHandlerRegistry from #<I> is not stabilized, so this is not enough to resolve the issue.
grpc_grpc-java
train
java,java
a85d9b826669b3650d4ec85b994553d2764c6030
diff --git a/lxd/storage/drivers/volume.go b/lxd/storage/drivers/volume.go index <HASH>..<HASH> 100644 --- a/lxd/storage/drivers/volume.go +++ b/lxd/storage/drivers/volume.go @@ -354,8 +354,8 @@ func (v Volume) NewVMBlockFilesystemVolume() Volume { } // SetQuota calls SetVolumeQuota on the Volume's driver. -func (v Volume) SetQuota(size string, op *operations.Operation) error { - return v.driver.SetVolumeQuota(v, size, op) +func (v Volume) SetQuota(size string, allowUnsafeResize bool, op *operations.Operation) error { + return v.driver.SetVolumeQuota(v, size, allowUnsafeResize, op) } // SetConfigSize sets the size config property on the Volume (does not resize volume).
lxd/storage/drivers/volume: Add allowUnsafeResize arg to SetQuota and pass to SetVolumeQuota
lxc_lxd
train
go
0cef5f2a4f58145dc6d763c4a6eb0541f6401039
diff --git a/flake8_future_import.py b/flake8_future_import.py index <HASH>..<HASH> 100755 --- a/flake8_future_import.py +++ b/flake8_future_import.py @@ -166,7 +166,7 @@ def main(args): return parser = argparse.ArgumentParser() choices = set(10 + feature.index for feature in FEATURES.values()) - choices |= set(50 + choice for choice in choices) | set([90]) + choices |= set(40 + choice for choice in choices) | set([90]) choices = set('FI{0}'.format(choice) for choice in choices) parser.add_argument('--ignore', help='Ignore the given comma-separated ' 'codes')
Fix defining upper error codes The upper error codes (`FI<I>` and greater) accidentally used `FI<I>` and greater.
xZise_flake8-future-import
train
py
49ca87364e772f556c1ba21bdd54cc3541628475
diff --git a/lib/flipper/ui/action.rb b/lib/flipper/ui/action.rb index <HASH>..<HASH> 100644 --- a/lib/flipper/ui/action.rb +++ b/lib/flipper/ui/action.rb @@ -56,9 +56,10 @@ module Flipper @flipper, @request = flipper, request @code = 200 @headers = {"Content-Type" => "text/plain"} - @breadcrumbs = [] - if app_path - @breadcrumbs << Breadcrumb.new("App", app_path) + @breadcrumbs = if Flipper::UI.app_path + [Breadcrumb.new("App", Flipper::UI.app_path)] + else + [] end end @@ -197,17 +198,6 @@ module Flipper request.env['SCRIPT_NAME'] end - # Internal: Allows the "App" breadcrumb to be: - # - # - Turned off via: `Flipper::UI.app_path = false` - # - Set to a specific value via: `Flipper::UI.app_path = '/admin'` - # - Set to the referer (if available) or root of the parent application - def app_path - if Flipper::UI.app_path != false - Flipper::UI.app_path || request.env['HTTP_REFERER'] || '/' - end - end - # Private def views_path self.class.views_path
Only show app crumb if app path set Using referrer means that clicking around makes app show up.
jnunemaker_flipper
train
rb
c491c62d0e18959c41701611b14ca0414fab37bf
diff --git a/scriptworker/cot/verify.py b/scriptworker/cot/verify.py index <HASH>..<HASH> 100644 --- a/scriptworker/cot/verify.py +++ b/scriptworker/cot/verify.py @@ -1022,10 +1022,8 @@ async def verify_docker_image_task(chain, link): errors.append("{} is not a valid docker-image workerType!".format(worker_type)) # XXX remove the command checks once we have a vetted decision task # from in-tree yaml - command = link.task['payload'].get('command') - # this path changed to /builds in https://bugzilla.mozilla.org/show_bug.cgi?id=1394883 - if command != ["/bin/bash", "-c", "/home/worker/bin/build_image.sh"] and command != ['/bin/bash', '-c', '/builds/worker/bin/build_image.sh']: - errors.append("{} {} illegal command {}!".format(link.name, link.task_id, command)) + if link.task['payload'].get('command') and link.task['payload']['command'] != ["/bin/bash", "-c", "/home/worker/bin/build_image.sh"]: + errors.append("{} {} illegal command {}!".format(link.name, link.task_id, link.task['payload']['command'])) raise_on_errors(errors)
Bug <I> - revert build_image check (#<I>) The normal case here is no command -- /home/builds is allowed for legacy build-image tasks only.
mozilla-releng_scriptworker
train
py
f178b29e354bcf06fa172fec7e1585c61b278ada
diff --git a/furious/context/_local.py b/furious/context/_local.py index <HASH>..<HASH> 100644 --- a/furious/context/_local.py +++ b/furious/context/_local.py @@ -55,7 +55,7 @@ def _init(): # If there is a context and it is initialized to this request, # return, otherwise reinitialize the _local_context. if (hasattr(_local_context, '_initialized') and - _local_context._initialized == os.environ['REQUEST_ID_HASH']): + _local_context._initialized == os.environ.get('REQUEST_ID_HASH')): return # Used to track the context object stack. @@ -66,7 +66,7 @@ def _init(): _local_context._executing_async = [] # So that we do not inadvertently reinitialize the local context. - _local_context._initialized = os.environ['REQUEST_ID_HASH'] + _local_context._initialized = os.environ.get('REQUEST_ID_HASH') return _local_context
Use get method when checking REQUEST_ID_HASH. This will prevent unit tests from blowing up when not set. Resolves issue #<I>.
Workiva_furious
train
py
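A minimal sketch (not the furious source) of the pattern this commit adopts: dictionary-style access to os.environ raises KeyError when the variable is unset, while .get() returns None, which is what keeps unit tests from blowing up.

```python
import os

# .get() returns None when REQUEST_ID_HASH is not set, instead of raising KeyError
request_id = os.environ.get("REQUEST_ID_HASH")

# the replaced pattern would fail in environments (e.g. unit tests) without the variable:
# request_id = os.environ["REQUEST_ID_HASH"]  # KeyError if unset
```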
73296edf7f42ee9db8a9895c9cd709868bc3faa2
diff --git a/fluids/safety_valve.py b/fluids/safety_valve.py index <HASH>..<HASH> 100644 --- a/fluids/safety_valve.py +++ b/fluids/safety_valve.py @@ -293,7 +293,11 @@ def API520_N(P1): .. [1] API Standard 520, Part 1 - Sizing and Selection. ''' P1 = P1/1000. # Pa to kPa - return (0.02764*P1-1000.)/(0.03324*P1-1061) + if P1 =< 10339: + KN = 1 + else: + KN = (0.02764*P1-1000.)/(0.03324*P1-1061) + return KN _KSH_psigs = [15, 20, 40, 60, 80, 100, 120, 140, 160, 180, 200, 220, 240, 260,
Bug fix, for KN at P =< <I> kPa
CalebBell_fluids
train
py
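Note that the recorded diff uses the operator `=<`, which is not valid Python; below is a hedged sketch of the intended piecewise logic (the standalone function form is illustrative, not the fluids API).

```python
def api520_n(P1):
    """Correction factor KN for upstream pressure P1 given in Pa (sketch)."""
    P1 = P1 / 1000.0   # Pa to kPa, as in the original function
    if P1 <= 10339:    # '<=' assumed; the diff's '=<' is a syntax error
        return 1.0
    return (0.02764 * P1 - 1000.0) / (0.03324 * P1 - 1061.0)
```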
ca855b3d6d35b6f0dba7132a2f07ce68b2320c1b
diff --git a/src/ServiceProvider.php b/src/ServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/ServiceProvider.php +++ b/src/ServiceProvider.php @@ -7,13 +7,6 @@ use Illuminate\Support\ServiceProvider as IlluminateServiceProvider; class ServiceProvider extends IlluminateServiceProvider { /** - * Indicates if loading of the provider is deferred. - * - * @var bool - */ - protected $defer = true; - - /** * Configure package paths. */ private function configurePaths() @@ -34,16 +27,6 @@ class ServiceProvider extends IlluminateServiceProvider } /** - * Get the services provided by the provider. - * - * @return array - */ - public function provides() - { - return ['pragmarx.google2fa']; - } - - /** * Register the service provider. * * @return void @@ -53,7 +36,10 @@ class ServiceProvider extends IlluminateServiceProvider $this->app->singleton('pragmarx.google2fa', function ($app) { return $app->make(Google2FA::class); }); + } + public function boot() + { $this->configurePaths(); $this->mergeConfig();
Fix ServiceProvider issues (#<I>) * Fix ServiceProvider issues
antonioribeiro_google2fa-laravel
train
php
ee823e40ebac625d3cc5c33751c7573950c3c614
diff --git a/js/feature/featureTrack.js b/js/feature/featureTrack.js index <HASH>..<HASH> 100755 --- a/js/feature/featureTrack.js +++ b/js/feature/featureTrack.js @@ -244,9 +244,12 @@ class FeatureTrack extends TrackBase { const data = []; for (let feature of features) { - const featureData = (typeof feature.popupData === "function") ? - feature.popupData(genomicLocation) : - this.extractPopupData(feature._f || feature); + // Whole genome hack, whole-genome psuedo features store the "real" feature in an _f field + const f = feature._f || feature; + + const featureData = (typeof f.popupData === "function") ? + f.popupData(genomicLocation) : + this.extractPopupData(f); if (featureData) { @@ -256,7 +259,6 @@ class FeatureTrack extends TrackBase { // If we have an infoURL, find the name property and create the link. We do this at this level // to catch name properties in both custom popupData functions and the generic extractPopupData function - const infoURL = this.infoURL || this.config.infoURL; if(infoURL) { for (let fd of featureData) {
feature track popup text change for whole genome view
igvteam_igv.js
train
js
00ff08b14547e29aae28fa3f07de2ccd09f04aa3
diff --git a/components/cdn.js b/components/cdn.js index <HASH>..<HASH> 100644 --- a/components/cdn.js +++ b/components/cdn.js @@ -76,6 +76,9 @@ SteamUser.prototype.getDepotDecryptionKey = function(appID, depotID, callback) { // Cached locally? var self = this; + appID = parseInt(appID, 10); + depotID = parseInt(depotID, 10); + this.storage.readFile("depot_key_" + appID + "_" + depotID + ".bin", function(err, file) { if (file && Math.floor(Date.now() / 1000) - file.readUInt32LE(0) < (60 * 60 * 24 * 14)) { callback(null, file.slice(4));
Parse appID and depotID as ints, if they got passed as strings
DoctorMcKay_node-steam-user
train
js
602fba9aba4db7bec069edf884cbe48a4be7296e
diff --git a/sos/remote.py b/sos/remote.py index <HASH>..<HASH> 100755 --- a/sos/remote.py +++ b/sos/remote.py @@ -103,7 +103,7 @@ class RemoteHost: def map_path(self, source): result = {} if isinstance(source, str): - dest = os.path.abspath(source) + dest = os.path.abspath(os.path.expanduser(source)) for k,v in self.path_map.items(): if dest.startswith(k): dest = v + dest[len(k):]
Handle ~ in path_map
vatlab_SoS
train
py
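A small illustration (paths are hypothetical) of why os.path.expanduser is added before abspath: abspath alone does not resolve a leading "~", so the tilde would be kept as a literal path component instead of the user's home directory.

```python
import os

source = "~/data/results"  # hypothetical user-supplied path
dest = os.path.abspath(os.path.expanduser(source))
# expanduser: "~/data/results" -> "/home/<user>/data/results" (home dir resolved)
# abspath then normalizes the result; without expanduser, "~" would stay literal
```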
8e6c0a54899e7cf56a9b3112f4a9eff84523eedc
diff --git a/src/MaxMind/MinFraud.php b/src/MaxMind/MinFraud.php index <HASH>..<HASH> 100644 --- a/src/MaxMind/MinFraud.php +++ b/src/MaxMind/MinFraud.php @@ -352,8 +352,7 @@ class MinFraud } catch (ValidationExceptionInterface $exception) { throw new InvalidInputException( $exception->getMessage(), - $exception->getCode(), - $exception + $exception->getCode() ); } }
Don't pass previous exception as it leaks too many implementation details
maxmind_minfraud-api-php
train
php
7e851d3eb320755e10dbb9e370b800bc6fbea5ed
diff --git a/lib/web_console/repl/irb.rb b/lib/web_console/repl/irb.rb index <HASH>..<HASH> 100644 --- a/lib/web_console/repl/irb.rb +++ b/lib/web_console/repl/irb.rb @@ -12,13 +12,14 @@ module WebConsole # Monkey patch the reference Irb class so that the unqualified prints go # to the context's output method. class ::IRB::Irb - def print(*args) - @context.instance_variable_get(:@output_method).print(*args) - end + private + def print(*args) + @context.instance_variable_get(:@output_method).print(*args) + end - def printf(str, *args) - @context.instance_variable_get(:@output_method).print(str % args) - end + def printf(str, *args) + @context.instance_variable_get(:@output_method).print(str % args) + end end class StringIOInputMethod < ::IRB::InputMethod
Privatize the monkey patched methods No need for them to clutter the public methods.
gsamokovarov_web-console-rails3
train
rb
bd110895a8918955db889396c10b36ef0fa6d3c7
diff --git a/src/Document.php b/src/Document.php index <HASH>..<HASH> 100644 --- a/src/Document.php +++ b/src/Document.php @@ -413,10 +413,8 @@ class Document implements DataStoreInterface, HasParentsInterface, \ArrayAccess, return $this->_data[$name]; } elseif ($this instanceof Model && $schema->hasRelation($fieldName)) { $related = $schema->relation($fieldName)->get($this); - if ($related instanceof HasParentsInterface) { - $related->setParent($this, $name); - } - return $this->_data[$name] = $related; + $this->_set($name, $related); + return $related; } elseif (isset($field['type']) && $field['type'] === 'object') { $value = []; } else { diff --git a/src/Schema.php b/src/Schema.php index <HASH>..<HASH> 100644 --- a/src/Schema.php +++ b/src/Schema.php @@ -938,6 +938,10 @@ class Schema return $data; } + if (!is_array($data)) { + return; + } + $class = ltrim($options['class'], '\\'); $config = [
Make two minor refactorings.
crysalead_chaos-orm
train
php,php
279777f2f265405236bf3b4ab7b974e80f84a4ed
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/Task.java b/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/Task.java index <HASH>..<HASH> 100644 --- a/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/Task.java +++ b/flink-runtime/src/main/java/org/apache/flink/runtime/taskmanager/Task.java @@ -638,6 +638,7 @@ public class Task implements Runnable { else { if (STATE_UPDATER.compareAndSet(this, current, ExecutionState.FAILED)) { // proper failure of the task. record the exception as the root cause + LOG.error("Task execution failed. ", t); failureCause = t; cancelInvokable();
[FLINK-<I>] Log Exceptions by Invokable This closes #<I>
apache_flink
train
java
af5adb1ad212683f52ac3153493bad1dfe2becf8
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ setup( # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html - version='0.0.2', + version='0.0.3', description='Convenience tools for web3.py', long_description=long_description, @@ -60,5 +60,5 @@ setup( packages=find_packages(exclude=['tests', 'venv']), # https://packaging.python.org/en/latest/requirements.html - install_requires=['psutil>5,<6', 'toolz<1', 'web3==3.10.0'], + install_requires=['psutil>5,<6', 'toolz<1', 'web3==3.11.0'], )
web3.py update for bugfixes * eth.syncing was crashing when it was False * clearer message when trying to access contract that's not deployed * validates address checksum in contract constructor
carver_web3utils.py
train
py
96771632b1942eb694d31b2574d57cc2fa0bac47
diff --git a/test/range-parser.js b/test/range-parser.js index <HASH>..<HASH> 100644 --- a/test/range-parser.js +++ b/test/range-parser.js @@ -46,6 +46,13 @@ describe('parseRange(len, str)', function(){ assert.deepEqual(range[0], { start: 400, end: 999 }) }) + it('should parse "bytes=0-"', function(){ + var range = parse(1000, 'bytes=0-') + assert.strictEqual(range.type, 'bytes') + assert.strictEqual(range.length, 1) + assert.deepEqual(range[0], { start: 0, end: 999 }) + }) + it('should parse str with no bytes', function(){ var range = parse(1000, 'bytes=0-0') assert.strictEqual(range.type, 'bytes')
tests: add test for "bytes=0-" closes #<I>
jshttp_range-parser
train
js
b9220994a6ca62a97a1e53dab2e1617663bc34a2
diff --git a/taskqueue/aws_queue_api.py b/taskqueue/aws_queue_api.py index <HASH>..<HASH> 100644 --- a/taskqueue/aws_queue_api.py +++ b/taskqueue/aws_queue_api.py @@ -33,10 +33,14 @@ class AWSTaskQueueAPI(object): @property def enqueued(self): - return int(self.status()['ApproximateNumberOfMessages']) + status = self.status() + return int(status['ApproximateNumberOfMessages']) + int(status['ApproximateNumberOfMessagesNotVisible']) def status(self): - resp = self._sqs.get_queue_attributes(QueueUrl=self._qurl, AttributeNames=['ApproximateNumberOfMessages']) + resp = self._sqs.get_queue_attributes( + QueueUrl=self._qurl, + AttributeNames=['ApproximateNumberOfMessages', 'ApproximateNumberOfMessagesNotVisible'] + ) return resp['Attributes'] def insert(self, task):
fix: enqueued measures both visible and non-visible messages on aws
seung-lab_python-task-queue
train
py
4983b8bd57c3fb4fa1f81dcd59bff1792b4c823c
diff --git a/acceptance/tests/apply/puppet_apply_trace.rb b/acceptance/tests/apply/puppet_apply_trace.rb index <HASH>..<HASH> 100644 --- a/acceptance/tests/apply/puppet_apply_trace.rb +++ b/acceptance/tests/apply/puppet_apply_trace.rb @@ -2,6 +2,6 @@ test_name 'puppet apply --trace should provide a stack trace' agents.each do |agent| on(agent, puppet('apply --trace -e "blue < 2"'), :acceptable_exit_codes => 1) do - assert_match(/apply\.rb.*in `main'.*command_line.*in `execute'.*puppet.*in `<main>'/m, stderr, "Did not print expected stack trace on stderr") + assert_match(/\.rb:\d+:in `\w+'/m, stderr, "Did not print expected stack trace on stderr") end end
(maint) fix stacktrace matcher in apply trace test The first commit of this test had an overly specific stack trace matcher which did not work on certain ruby stacks. This change uses a different match that should only match stack traces but be forgiving to changes in the code and across ruby versions.
puppetlabs_puppet
train
rb
b9727e22850393c66739b29844d04d8b078fa82a
diff --git a/grimoire_elk/ocean/remo.py b/grimoire_elk/ocean/remo.py index <HASH>..<HASH> 100644 --- a/grimoire_elk/ocean/remo.py +++ b/grimoire_elk/ocean/remo.py @@ -41,20 +41,20 @@ class Mapping(BaseMapping): if es_major != '2': mapping = ''' - { - "dynamic":true, + { + "dynamic":true, + "properties": { + "data": { "properties": { - "data": { - "properties": { - "description": { - "type": "text", - "index": true - } - } + "description": { + "type": "text", + "index": true } } + } } - ''' + } + ''' else: mapping = ''' {
[ocean] Fix ES <I>.x mappings for Remo ocean backend This patch fixes the mappings for Remo, which now work also with ES <I>.x
chaoss_grimoirelab-elk
train
py
fd552d38fdaff0fa16b0e5752ac3c945c234350b
diff --git a/ipyleaflet/leaflet.py b/ipyleaflet/leaflet.py index <HASH>..<HASH> 100644 --- a/ipyleaflet/leaflet.py +++ b/ipyleaflet/leaflet.py @@ -467,6 +467,13 @@ class LayerGroup(Layer): raise LayerException('layer not on in layergroup: %r' % layer) self.layers = tuple([l for l in self.layers if l.model_id != layer.model_id]) + def substitute_layer(self, old, new): + if isinstance(new, dict): + new = basemap_to_tiles(new) + if old.model_id not in self._layer_ids: + raise LayerException('Could not substitute layer: layer not in layergroup.') + self.layers = tuple([new if l.model_id == old.model_id else l for l in self.layers]) + def clear_layers(self): self.layers = ()
Add substitute_layer to LayerGroup class This maintains consistency with Map class.
jupyter-widgets_ipyleaflet
train
py
32bcbbc90b12508b17d0b3f2ec7d8d1cea4288f6
diff --git a/src/Http/Output.php b/src/Http/Output.php index <HASH>..<HASH> 100644 --- a/src/Http/Output.php +++ b/src/Http/Output.php @@ -526,7 +526,7 @@ class Output extends Message\Response */ public function sendPayload(array $data, $mimeType = null) { - $mimeType = isset($mimeType) ? $mimeType : $this->mimeType; + $mimeType = isset($mimeType) ? $mimeType : 'application/json'; $this->setContentType($mimeType); if ($mimeType === 'application/json') {
change default payload to application/json
o2system_kernel
train
php
fc419dc7054679da658f3c4a0157250d846256b7
diff --git a/tests/PHPUnit/Integration/ReleaseCheckListTest.php b/tests/PHPUnit/Integration/ReleaseCheckListTest.php index <HASH>..<HASH> 100644 --- a/tests/PHPUnit/Integration/ReleaseCheckListTest.php +++ b/tests/PHPUnit/Integration/ReleaseCheckListTest.php @@ -101,6 +101,8 @@ class ReleaseCheckListTest extends \PHPUnit_Framework_TestCase public function test_screenshotsStoredInLfs() { $screenshots = Filesystem::globr(PIWIK_INCLUDE_PATH . '/tests/UI/expected-screenshots', '*.png'); + $screenshotsPlugins = Filesystem::globr(PIWIK_INCLUDE_PATH . '/plugins/*/tests/UI/expected-screenshots', '*.png'); + $screenshots = array_merge($screenshots, $screenshotsPlugins); $cleanPath = function ($value) { return str_replace(PIWIK_INCLUDE_PATH . '/', '', $value); };
Improve ReleaseChecklistTest to check also plugin UI files in lfs
matomo-org_matomo
train
php
c58680945b68bc44c2f1b3430e1bdbf5a6b0e9ff
diff --git a/modules/adpod.js b/modules/adpod.js index <HASH>..<HASH> 100644 --- a/modules/adpod.js +++ b/modules/adpod.js @@ -1,6 +1,6 @@ /** * This module houses the functionality to evaluate and process adpod adunits/bids. Specifically there are several hooked functions, - * that either supplement the base function (ie to check something additional or unique to adpod objects) or to replace the base funtion + * that either supplement the base function (ie to check something additional or unique to adpod objects) or to replace the base function * entirely when appropriate. * * Brief outline of each hook:
Fix a typo in code comment (#<I>)
prebid_Prebid.js
train
js
5371b388195c77c6940d982fde2b2a3265e8c5e5
diff --git a/src/Console/PermaconScanCommand.php b/src/Console/PermaconScanCommand.php index <HASH>..<HASH> 100644 --- a/src/Console/PermaconScanCommand.php +++ b/src/Console/PermaconScanCommand.php @@ -3,12 +3,13 @@ /************************ * * Rys - Furkan Kadıoğlu -* May - 2016 +* July - 2016 * http://github.com/furkankadioglu * *************************/ use Illuminate\Console\Command; +use Illuminate\Filesystem\Filesystem; class PermaconScanCommand extends Command { @@ -21,11 +22,14 @@ class PermaconScanCommand extends Command { { $this->info('+ Scanning configration files'); - $files = scandir(config_path()); + $files = array_slice(scandir(config_path()), 2); + foreach($files as $file) { - $this->info($file); + $this->info("Scanned and Generated Copy: ".$file); + $content = file_get_contents(config_path()."/".$file); + $save = file_put_contents(storage_path()."/permacon/".$file, $content); } }
Scan command for artisan Scan command; scanning all config directory and generating copies.
furkankadioglu_Permacon
train
php
79685fd62bff19dcdb4f9b806619578f95fc67b9
diff --git a/cmd/juju/service/upgradecharm.go b/cmd/juju/service/upgradecharm.go index <HASH>..<HASH> 100644 --- a/cmd/juju/service/upgradecharm.go +++ b/cmd/juju/service/upgradecharm.go @@ -193,7 +193,15 @@ func (c *upgradeCharmCommand) Run(ctx *cmd.Context) error { return err } - ids, err := handleResources(c, c.Resources, c.ServiceName, charmInfo.Meta.Resources) + metaRes := charmInfo.Meta.Resources + // only include resource metadata for the files we're actually uploading + for name, _ := range charmInfo.Meta.Resources { + if _, ok := c.Resources[name]; !ok { + delete(metaRes, name) + } + } + + ids, err := handleResources(c, c.Resources, c.ServiceName, metaRes) if err != nil { return errors.Trace(err) }
make sure we only include metadata for the resources we're actually uploading
juju_juju
train
go
f7bf96893c416b6241d1d1d3ef6a01b447783e10
diff --git a/code/forms/OrderForm.php b/code/forms/OrderForm.php index <HASH>..<HASH> 100644 --- a/code/forms/OrderForm.php +++ b/code/forms/OrderForm.php @@ -10,6 +10,9 @@ class OrderForm extends Form { function __construct($controller, $name) { + //clear old messages... + $this->clearMessage(); + //Requirements::themedCSS('OrderForm'); Requirements::javascript('ecommerce/javascript/OrderForm.js'); @@ -121,6 +124,7 @@ class OrderForm extends Form { //allow updating via decoration $this->extend('updateForm',$this); + }
clearing old message - as without it, the old message would hang around....
silvershop_silvershop-core
train
php
9798b4a409a80aba457c07024f9c3d405e3c038d
diff --git a/src/index.integration-spec.js b/src/index.integration-spec.js index <HASH>..<HASH> 100644 --- a/src/index.integration-spec.js +++ b/src/index.integration-spec.js @@ -13,21 +13,21 @@ describe('[integration] sagui', function () { fs.copySync(projectFixture, projectPath) }) - it('should configure webpack', () => { - const webpack = sagui({ projectPath }).webpack - expect(webpack.length).to.equal(1) - }) - - it('should configure karma', () => { - const karma = sagui({ projectPath }).karma - expect(karma.webpack).to.exist - }) - describe('after install', () => { beforeEach(() => { sagui({ projectPath, action: 'install' }).run() }) + it('should configure webpack', () => { + const webpack = sagui({ projectPath }).webpack + expect(webpack.length).to.equal(1) + }) + + it('should configure karma', () => { + const karma = sagui({ projectPath }).karma + expect(karma.webpack).to.exist + }) + it('should be possible to build', () => { return sagui({ projectPath, action: 'build' }).run() })
Integration tests can no longer rely on having a default pages 👻
saguijs_sagui
train
js
22414746e584a0c4a7cecf3ae85a7e30a5d4e726
diff --git a/src/sap.m/src/sap/m/Table.js b/src/sap.m/src/sap/m/Table.js index <HASH>..<HASH> 100644 --- a/src/sap.m/src/sap/m/Table.js +++ b/src/sap.m/src/sap/m/Table.js @@ -5,7 +5,6 @@ // Provides control sap.m.Table. sap.ui.define([ "sap/ui/core/Core", - "sap/ui/core/Element", "sap/ui/Device", "./library", "./ListBase", @@ -23,7 +22,7 @@ sap.ui.define([ // jQuery custom selectors ":sapTabbable" "sap/ui/dom/jquery/Selectors" ], - function(Core, Element, Device, library, ListBase, ListItemBase, CheckBox, TableRenderer, Log, BaseObject, ResizeHandler, PasteHelper, KeyCodes, jQuery, ListBaseRenderer, Icon) { + function(Core, Device, library, ListBase, ListItemBase, CheckBox, TableRenderer, Log, BaseObject, ResizeHandler, PasteHelper, KeyCodes, jQuery, ListBaseRenderer, Icon) { "use strict";
[INTERNAL] m.Table/ui.Table: Improve navigation from MessagePopover to Table Remove core.Element definition as it was not needed any more. Fix for commit <I> JIRA: CPOUIFTEAMB-<I> Change-Id: I<I>bcde<I>d3d<I>cb<I>a<I>b<I>c<I>
SAP_openui5
train
js
19b2b268a3f209891ada635129dd33101074baaa
diff --git a/fault/tester.py b/fault/tester.py index <HASH>..<HASH> 100644 --- a/fault/tester.py +++ b/fault/tester.py @@ -1,3 +1,4 @@ +import magma from bit_vector import BitVector import fault.actions as actions from fault.verilator_target import VerilatorTarget @@ -44,6 +45,8 @@ class Tester: def __init__(self, circuit, clock=None): self.circuit = circuit self.actions = [] + if clock is not None and not isinstance(clock, magma.ClockKind): + raise TypeError(f"Expected clock port: {clock}") self.clock = clock def make_target(self, target, **kwargs):
Check that clock is a clock type
leonardt_fault
train
py
afadd231f4a05587c8c7b6d63a11f7e85ca10ca8
diff --git a/recordlinkage/comparing.py b/recordlinkage/comparing.py index <HASH>..<HASH> 100644 --- a/recordlinkage/comparing.py +++ b/recordlinkage/comparing.py @@ -178,8 +178,9 @@ def exact(s1, s2, missing_value=0, disagreement_value=0, output='any', return_ag compare = (s1 == s2) compare = compare.astype(int) else: - compare = s1[(s1 == s2)] - compare[(s1 != s2)] = disagreement_value + compare = s1.copy() + print compare + compare.loc[(s1 != s2)] = disagreement_value # Only for missing values compare[_missing(df1, df2)] = missing_value
Fix bug in comparing with agreement levels
J535D165_recordlinkage
train
py
530f515556e60415d090148a22b7aa31393a2695
diff --git a/lib/Doctrine/ORM/Mapping/Builder/ClassMetadataBuilder.php b/lib/Doctrine/ORM/Mapping/Builder/ClassMetadataBuilder.php index <HASH>..<HASH> 100644 --- a/lib/Doctrine/ORM/Mapping/Builder/ClassMetadataBuilder.php +++ b/lib/Doctrine/ORM/Mapping/Builder/ClassMetadataBuilder.php @@ -59,9 +59,9 @@ class ClassMetadataBuilder /** * Adds and embedded class * - * @param string $fieldName - * @param string $class - * @param string|null $columnPrefix + * @param string $fieldName + * @param string $class + * @param string|false|null $columnPrefix * * @return $this */
Added "false" value to $columnPrefix type declaration. (#<I>)
doctrine_orm
train
php
67460a2bbbe9c4e465f1d4e2a5a8ba4ceff71fdb
diff --git a/src/com/googlecode/objectify/util/Monotonic.java b/src/com/googlecode/objectify/util/Monotonic.java index <HASH>..<HASH> 100644 --- a/src/com/googlecode/objectify/util/Monotonic.java +++ b/src/com/googlecode/objectify/util/Monotonic.java @@ -69,7 +69,11 @@ public class Monotonic { Field f = TypeUtils.getDeclaredField(entityClass, fieldName); f.setAccessible(true); - return f.getLong(thing); + Number n = (Number)f.get(thing); + if (n == null) + return 0; + else + return n.longValue(); } catch (Exception e) { throw new IllegalStateException(e); } }
Monotonic is now somewhat more graceful about handling different numeric field types (Long, int, etc). git-svn-id: <URL>
objectify_objectify
train
java
19e9cf6f7bc4c693cf5047625ac6dfa201e53db1
diff --git a/lib/pyfrc/physics/core.py b/lib/pyfrc/physics/core.py index <HASH>..<HASH> 100644 --- a/lib/pyfrc/physics/core.py +++ b/lib/pyfrc/physics/core.py @@ -361,7 +361,7 @@ class PhysicsInterface: with self._lock: self.vx += self.start_x - self.x self.vy += self.start_y - self.y - self._update_gyros(-self.angle) + self._update_gyros(self.start_angle - self.angle) self.x = self.start_x self.y = self.start_y
Fix resetting angle if start angle isn't 0
robotpy_pyfrc
train
py
ed1cdd230b7dbd1a9089c3e13eb7d55827b79d90
diff --git a/lib/engine_http.js b/lib/engine_http.js index <HASH>..<HASH> 100644 --- a/lib/engine_http.js +++ b/lib/engine_http.js @@ -435,13 +435,20 @@ HttpEngine.prototype.compile = function compile(tasks, scenarioSpec, ee) { initialContext._successCount = 0; initialContext._jar = request.jar(); - + let keepAliveMsec = 30 * 1000; + if (self.config.http && self.config.http.keepAlive) { + keepAliveMsec = self.config.http.keepAlive * 1000; + } + let maxSockets = 1; + if (self.config.http && self.config.http.maxSockets) { + maxSockets = self.config.http.maxSockets; + } if (!self.pool) { let agentOpts = { keepAlive: true, - keepAliveMsecs: 1000, - maxSockets: 1, - maxFreeSockets: 1 + keepAliveMsecs: keepAliveMsec, + maxSockets: maxSockets, + maxFreeSockets: maxSockets }; // FIXME: This won't work if we have a pool - needs to be set in agentOptions
feat(http): Increase default keep alive timeout; allow agent config - Default keep alive timeout is now <I>s - Keep alive timeout may be configured via config.http.keepAlive - Max number of sockets may be configured via config.http.maxSockets
artilleryio_artillery
train
js
2dffcd13e4026a31e665ba8f2c9016b51108bf17
diff --git a/closure/goog/dom/abstractrange.js b/closure/goog/dom/abstractrange.js index <HASH>..<HASH> 100644 --- a/closure/goog/dom/abstractrange.js +++ b/closure/goog/dom/abstractrange.js @@ -14,7 +14,6 @@ goog.provide('goog.dom.AbstractRange'); goog.provide('goog.dom.RangeIterator'); goog.provide('goog.dom.RangeType'); -goog.forwardDeclare('goog.dom.SavedCaretRange'); goog.require('goog.dom'); goog.require('goog.dom.NodeType'); goog.require('goog.dom.TagIterator'); @@ -447,8 +446,7 @@ goog.dom.AbstractRange.prototype.saveUsingDom = goog.abstractMethod; * Saves the range using HTML carets. As long as the carets remained in the * HTML, the range can be restored...even when the HTML is copied across * documents. - * // TODO(user) return type should become AbstractSavedCaretRange - * @return {?goog.dom.SavedCaretRange} A range representation that can + * @return {?goog.dom.AbstractSavedCaretRange} A range representation that can * be restored as long as carets are not removed. Returns null if carets * could not be created. * @abstract
Remove the last forwardDeclare from the DOM package. RELNOTES: n/a PiperOrigin-RevId: <I>
google_closure-library
train
js
47db156d4273fbc79d20b7461e8419e13a796445
diff --git a/railties/lib/rails/paths.rb b/railties/lib/rails/paths.rb index <HASH>..<HASH> 100644 --- a/railties/lib/rails/paths.rb +++ b/railties/lib/rails/paths.rb @@ -55,13 +55,13 @@ module Rails end def []=(path, value) - value = Path.new(self, path, value) unless value.is_a?(Path) + value = Path.new(self, path, [value].flatten) unless value.is_a?(Path) super(path, value) end def add(path, options={}) with = options[:with] || path - self[path] = Path.new(self, path, with, options) + self[path] = Path.new(self, path, [with].flatten, options) end def all_paths @@ -104,9 +104,8 @@ module Rails attr_reader :path attr_accessor :glob - def initialize(root, current, *paths) - options = paths.last.is_a?(::Hash) ? paths.pop : {} - super(paths.flatten) + def initialize(root, current, paths, options = {}) + super(paths) @current = current @root = root
stop using *args in order to simplify our constructor
rails_rails
train
rb
bf4503284321b130a04479e1cb82c997e967486f
diff --git a/lib/ougai/logger.rb b/lib/ougai/logger.rb index <HASH>..<HASH> 100644 --- a/lib/ougai/logger.rb +++ b/lib/ougai/logger.rb @@ -17,7 +17,7 @@ module Ougai @default_message = 'No message' @exc_key = :err @with_fields = {} - @formatter = create_formatter + @formatter = create_formatter if @formatter.nil? end class << self diff --git a/spec/logger_spec.rb b/spec/logger_spec.rb index <HASH>..<HASH> 100644 --- a/spec/logger_spec.rb +++ b/spec/logger_spec.rb @@ -51,6 +51,24 @@ describe Ougai::Logger do end end + describe '.new' do + context 'if formatter argument is not specified' do + it 'sets Bunyan to formatter attribute' do + expect(logger.formatter).to be_an(Ougai::Formatters::Bunyan) + end + end + + if RUBY_VERSION > '2.4' + context 'if formatter argument is specified' do + it 'sets it to formatter attribute' do + a_formatter = Ougai::Formatters::Readable.new + a_logger = described_class.new(io, formatter: a_formatter) + expect(a_logger.formatter).to eq a_formatter + end + end + end + end + shared_examples 'log' do context 'with message' do it 'outputs valid' do
Make Logger.new support formatter argument - Ruby <I> or later
tilfin_ougai
train
rb,rb
25092f7f2961c4def72edd1689484ef7bb8850e2
diff --git a/resource_aws_ecs_service.go b/resource_aws_ecs_service.go index <HASH>..<HASH> 100644 --- a/resource_aws_ecs_service.go +++ b/resource_aws_ecs_service.go @@ -240,7 +240,7 @@ func resourceAwsEcsServiceCreate(d *schema.ResourceData, meta interface{}) error return nil }) if err != nil { - return err + return fmt.Errorf("%s %q", err, d.Get("name").(string)) } service := *out.Service
provider/aws: aws_ecs_service should output service name along with error (#<I>)
terraform-providers_terraform-provider-aws
train
go
eb3bcc0be6a66adfc268c43b62e346428eadb406
diff --git a/Repository/SlugMapItemRepository.php b/Repository/SlugMapItemRepository.php index <HASH>..<HASH> 100644 --- a/Repository/SlugMapItemRepository.php +++ b/Repository/SlugMapItemRepository.php @@ -26,7 +26,7 @@ class SlugMapItemRepository extends EntityRepository * * @return \Doctrine\ORM\QueryBuilder */ - public function getBuilderByClassesAndProperty(array $classes, string $property): QueryBuilder + public function createBuilderByClassesAndProperty(array $classes, string $property): QueryBuilder { $qb = $this->createDefaultBuilder(); $this
Rename getBuilder* repository methods to createBuilder*.
DarvinStudio_DarvinContentBundle
train
php
0eed30f70832429c5bfb595b5a74730490c6ad65
diff --git a/src/main/java/org/efaps/admin/datamodel/ui/RateUI.java b/src/main/java/org/efaps/admin/datamodel/ui/RateUI.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/efaps/admin/datamodel/ui/RateUI.java +++ b/src/main/java/org/efaps/admin/datamodel/ui/RateUI.java @@ -117,7 +117,7 @@ public class RateUI * The Class Value. */ public static class Value - implements Serializable + implements Serializable, Comparable<Value> { /** The Constant serialVersionUID. */ private static final long serialVersionUID = 1L; @@ -238,6 +238,16 @@ public class RateUI } @Override + public int compareTo(final Value _arg0) + { + int ret = 0; + if (_arg0 != null) { + ret = _arg0.getRate().compareTo(getRate()); + } + return ret; + } + + @Override public String toString() { return ToStringBuilder.reflectionToString(this);
RateUI.Value must implement Comparable
eFaps_eFaps-Kernel
train
java
45a0c3551d4a83bd6eb1aa88eefcdfda89f57a2a
diff --git a/score-api/src/main/java/com/hp/score/api/StartBranchDataContainer.java b/score-api/src/main/java/com/hp/score/api/StartBranchDataContainer.java index <HASH>..<HASH> 100644 --- a/score-api/src/main/java/com/hp/score/api/StartBranchDataContainer.java +++ b/score-api/src/main/java/com/hp/score/api/StartBranchDataContainer.java @@ -62,7 +62,6 @@ public class StartBranchDataContainer implements Serializable{ .append(this.startPosition, that.startPosition) .append(this.executionPlanId, that.executionPlanId) .append(this.contexts, that.contexts) - .append(this.systemContext, that.systemContext) .isEquals(); } @@ -71,8 +70,6 @@ public class StartBranchDataContainer implements Serializable{ return new HashCodeBuilder() .append(this.startPosition) .append(this.executionPlanId) - .append(this.contexts) - .append(this.systemContext) .toHashCode(); } }
try to fix StackOverflow issue in tests...
CloudSlang_score
train
java
6e1bc689c5cb1816e3afd87b1efbd183b2e0fda2
diff --git a/packages/core/integration-tests/test/integration/globals/index.js b/packages/core/integration-tests/test/integration/globals/index.js index <HASH>..<HASH> 100644 --- a/packages/core/integration-tests/test/integration/globals/index.js +++ b/packages/core/integration-tests/test/integration/globals/index.js @@ -2,7 +2,7 @@ module.exports = function () { return { dir: __dirname, file: __filename, - buf: new Buffer(process.title).toString('base64'), + buf: Buffer.from(process.title).toString('base64'), global: !!global.document }; }; diff --git a/packages/core/integration-tests/test/javascript.js b/packages/core/integration-tests/test/javascript.js index <HASH>..<HASH> 100644 --- a/packages/core/integration-tests/test/javascript.js +++ b/packages/core/integration-tests/test/javascript.js @@ -802,7 +802,7 @@ describe('javascript', function() { assert.deepEqual(output(), { dir: path.join(__dirname, '/integration/globals'), file: path.join(__dirname, '/integration/globals/index.js'), - buf: new Buffer('browser').toString('base64'), + buf: Buffer.from('browser').toString('base64'), global: true }); });
Use Buffer.from (#<I>)
parcel-bundler_parcel
train
js,js
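Illustrative sketch of the API change the parcel-bundler commit above applies: Node.js deprecated the `new Buffer(string)` constructor, and `Buffer.from(string)` is the drop-in replacement used in the diff. The snippet below is standard Node behaviour, not code from the repository.

```js
// Deprecated form the commit removes: new Buffer('browser')
// Replacement used in the diff:       Buffer.from('browser')
const encoded = Buffer.from('browser').toString('base64');
console.log(encoded); // "YnJvd3Nlcg==" (identical output to the old constructor)
```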
8e580ec9278884c142df9b57ea22413d6eab84d8
diff --git a/lib/chef/application/client.rb b/lib/chef/application/client.rb index <HASH>..<HASH> 100644 --- a/lib/chef/application/client.rb +++ b/lib/chef/application/client.rb @@ -289,7 +289,7 @@ class Chef::Application::Client < Chef::Application :boolean => true option :fips, - :long => "--fips", + :long => "--[no-]fips", :description => "Enable fips mode", :boolean => true
Fix --no-fips on chef-client Somewhat unintuitively, mixlib-cli needs both `boolean => true` and `long => "--[no-]option"` for the boolean feature to work.
chef_chef
train
rb
78bd879d2359025d79e6303f223e60f035acaba2
diff --git a/src/main/java/eu/project/ttc/readers/AbstractTermSuiteCollectionReader.java b/src/main/java/eu/project/ttc/readers/AbstractTermSuiteCollectionReader.java index <HASH>..<HASH> 100644 --- a/src/main/java/eu/project/ttc/readers/AbstractTermSuiteCollectionReader.java +++ b/src/main/java/eu/project/ttc/readers/AbstractTermSuiteCollectionReader.java @@ -129,7 +129,9 @@ public abstract class AbstractTermSuiteCollectionReader extends CollectionReader this.totalFileByteSize += files[i].length(); } } - Collections.sort(this.mFiles, SizeFileComparator.SIZE_REVERSE); + Collections.shuffle(this.mFiles); +// Collections.sort(this.mFiles, SizeFileComparator.SIZE_REVERSE); + logger.info("Initializing collection reader on input dir {} (enc: {}, lang: {}, type: {})", inputDirPath, this.mEncoding,
Process documents in randomized order instead of biggest-first
termsuite_termsuite-core
train
java
3db167571b895bee47725ff8e2620d2aa4dfc631
diff --git a/utils/storage/storage_test.go b/utils/storage/storage_test.go index <HASH>..<HASH> 100644 --- a/utils/storage/storage_test.go +++ b/utils/storage/storage_test.go @@ -18,13 +18,6 @@ const database = "TestStoreAndLookup.db" func TestStoreAndLookup(t *testing.T) { var itf Interface - - { - if err := os.Remove(database); err != nil { - panic(err) - } - } - defer func() { if itf == nil { return @@ -32,6 +25,7 @@ func TestStoreAndLookup(t *testing.T) { if err := itf.Close(); err != nil { panic(err) } + os.Remove(database) }() // --------------------
[hotfix] Delete the test database AFTER the tests.
TheThingsNetwork_ttn
train
go
570437c98a7033329d4bba9ad19e686407fd65b0
diff --git a/test/tests.js b/test/tests.js index <HASH>..<HASH> 100644 --- a/test/tests.js +++ b/test/tests.js @@ -1,5 +1,13 @@ /* global describe, it, before */ +function hammingDistance(a, b, bits) { + var d = 0; + for (i = 0; i < bits*bits; i++) { + if (a[i] != b[i]) { d++; } + } + return d; +} + var expect = require('expect.js'); var glob = require('glob'); var path = require('path'); @@ -42,7 +50,9 @@ testFiles.forEach(function(fn) { encoding: 'utf-8' }).split(/\s/)[1]; - expect(expectedHash).to.be(hash.join("")); + // use hamming distance to iron out little differences in jpeg decoders + var hd = hammingDistance(expectedHash, hash.join(""), bits); + expect(hd).to.be.lessThan(3); done(); });
Test with hamming distance to iron out little differences in jpeg decoders.
commonsmachinery_blockhash-js
train
js
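A minimal sketch of the idea behind the blockhash-js test change above: instead of requiring an exact hash match, the test counts differing bits and accepts a small Hamming distance, so slight JPEG-decoder differences no longer fail the suite. The hash strings and threshold below are made up for illustration.

```js
function hammingDistance(a, b) {
  let d = 0;
  for (let i = 0; i < a.length; i++) {
    if (a[i] !== b[i]) d++; // count positions where the bit strings disagree
  }
  return d;
}

const expectedHash = '1100101001'; // hypothetical reference hash
const computedHash = '1100111001'; // one bit differs after decoding
console.log(hammingDistance(expectedHash, computedHash) < 3); // true: treated as a match
```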
cc6d3b2457d7378d7cef33b75631e14070896978
diff --git a/src/main/groovy/lang/Binding.java b/src/main/groovy/lang/Binding.java index <HASH>..<HASH> 100644 --- a/src/main/groovy/lang/Binding.java +++ b/src/main/groovy/lang/Binding.java @@ -22,6 +22,7 @@ import java.util.Map; * Represents the variable bindings of a script which can be altered * from outside the script object or created outside of a script and passed * into it. + * <p> Binding instances are not supposed to be used in a multithreaded context. * * @author <a href="mailto:[email protected]">James Strachan</a> * @version $Revision$
GROOVY-<I>: added a note saying that Binding is not intended to be used in a multithreaded context git-svn-id: <URL>
apache_groovy
train
java
442d59327558de04e3110068265d8bef25032f01
diff --git a/ella/ellacomments/newman_admin.py b/ella/ellacomments/newman_admin.py index <HASH>..<HASH> 100644 --- a/ella/ellacomments/newman_admin.py +++ b/ella/ellacomments/newman_admin.py @@ -17,7 +17,7 @@ class CommentOptionsGenericInline(newman.GenericStackedInline): class ThreadedCommentsNewmanAdmin(ThreadedCommentsAdmin, newman.NewmanModelAdmin): pass -MODELS_WITH_COMMENTS = getattr(settings, 'MODELS_WITH_COMMENTS', ('articles.article', 'galleries.gallery',)) +MODELS_WITH_COMMENTS = getattr(settings, 'MODELS_WITH_COMMENTS', ('articles.article', 'galleries.gallery', 'interviews.interview', )) newman.site.register(ThreadedComment, ThreadedCommentsNewmanAdmin) newman.site.append_inline(MODELS_WITH_COMMENTS, CommentOptionsGenericInline)
Added interviews.interview to MODELS_WITH_COMMENTS
ella_ella
train
py
220e3501afd4dd9f3f75deef18dc13c7fe9a7682
diff --git a/src/corePlugins.js b/src/corePlugins.js index <HASH>..<HASH> 100644 --- a/src/corePlugins.js +++ b/src/corePlugins.js @@ -884,6 +884,7 @@ export let corePlugins = { addUtilities({ '.grid-flow-row': { gridAutoFlow: 'row' }, '.grid-flow-col': { gridAutoFlow: 'column' }, + '.grid-flow-dense': { gridAutoFlow: 'dense' }, '.grid-flow-row-dense': { gridAutoFlow: 'row dense' }, '.grid-flow-col-dense': { gridAutoFlow: 'column dense' }, })
Add missing `.grid-flow-dense` utility (#<I>)
tailwindcss_tailwindcss
train
js
6a39fa4191895c52a18015c8d29a9f83b27f4e1e
diff --git a/salt/modules/status.py b/salt/modules/status.py index <HASH>..<HASH> 100644 --- a/salt/modules/status.py +++ b/salt/modules/status.py @@ -501,8 +501,8 @@ def pid(sig): if (not sig.endswith('"') and not sig.endswith("'") and not sig.startswith('-')): sig = "'" + sig + "'" - cmd = "{0[ps]} | grep {1} | grep -v grep | awk '{{print $2}}'".format( - __grains__, sig) + cmd = ("{0[ps]} | grep {1} | grep -v grep | fgrep -v status.pid | " + "awk '{{print $2}}'".format(__grains__, sig)) return (__salt__['cmd.run_stdout'](cmd) or '')
Keep pid of salt CLI command from showing in status.pid output When status.pid is executed by the salt-minion instance running on the master, it also returns the pid of the salt CLI command, because the search string matches. This commit adds another grep to the ps/grep/awk one-liner, which filters out lines matching 'status.pid'.
saltstack_salt
train
py
14b15bcd2cd77a4bf22e9272a453ed82f319dc66
diff --git a/pinboard/metadata.py b/pinboard/metadata.py index <HASH>..<HASH> 100644 --- a/pinboard/metadata.py +++ b/pinboard/metadata.py @@ -1,4 +1,4 @@ -__version__ = "0.4" +__version__ = "0.4.1" __author__ = "Dan Loewenherz" __copyright__ = "Copyright 2014, Lionheart Software" __maintainer__ = "Dan Loewenherz" diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,6 @@ import unittest import os from pinboard import metadata from distutils.cmd import Command -from test_pinboard import TestPinboardAPI try: from setuptools import setup @@ -38,6 +37,7 @@ class TestCommand(Command): pass def run(self): + from test_pinboard import TestPinboardAPI suite = unittest.TestLoader().loadTestsFromTestCase(TestPinboardAPI) unittest.TextTestRunner(verbosity=2).run(suite) @@ -52,7 +52,7 @@ setup( license=metadata.__license__, author=metadata.__author__, author_email=metadata.__email__, - packages=['pinboard'], + packages=['pinboard', 'test_pinboard'], package_data={'': ['LICENSE', 'README.rst']}, cmdclass={'test': TestCommand}, scripts=["bin/pinboard"]
make sure TestPinboard is only imported when needed
lionheart_pinboard.py
train
py,py
a3b1f287aeb2c7df76f623fc339b8a4e6f5c4247
diff --git a/lib/veritas/optimizer.rb b/lib/veritas/optimizer.rb index <HASH>..<HASH> 100644 --- a/lib/veritas/optimizer.rb +++ b/lib/veritas/optimizer.rb @@ -20,18 +20,18 @@ module Veritas def self.chain(*optimizers) optimizers.reverse_each.reduce(Noop) do |successor, optimizer| - create_optimizer(optimizer, successor) + link_optimizers(optimizer, successor) end end - def self.create_optimizer(optimizer, successor) + def self.link_optimizers(optimizer, successor) lambda do |operation| op = optimizer.new(operation) op.optimizable? ? op.optimize : successor.call(operation) end end - private_class_method :create_optimizer + private_class_method :link_optimizers end # class Optimizer end # module Veritas
Renamed private Optimizer method to reflect its responsibility better
dkubb_axiom
train
rb
1688854b92cd137ce7e3fc264820dcf47ee32a50
diff --git a/src/editor/CodeHintManager.js b/src/editor/CodeHintManager.js index <HASH>..<HASH> 100644 --- a/src/editor/CodeHintManager.js +++ b/src/editor/CodeHintManager.js @@ -129,7 +129,7 @@ * into the editor; * 2. match, a string that the manager may use to emphasize substrings of * hints in the hint list (case-insensitive); and - * 3. selectInitial, a boolean that indicates whether or not the the + * 3. selectInitial, a boolean that indicates whether or not the * first hint in the list should be selected by default. * 4. handleWideResults, a boolean (or undefined) that indicates whether * to allow result string to stretch width of display.
Edit CodeHintManager.js (#<I>) the the ->the
adobe_brackets
train
js
0dc5d53ba38f1cce0d91cf87c695dd4e22d3c309
diff --git a/src/InfoViz/Core/SelectionProvider/histogram2d.js b/src/InfoViz/Core/SelectionProvider/histogram2d.js index <HASH>..<HASH> 100644 --- a/src/InfoViz/Core/SelectionProvider/histogram2d.js +++ b/src/InfoViz/Core/SelectionProvider/histogram2d.js @@ -235,6 +235,7 @@ function get(model, query) { function getNotificationData(model, request) { const result = {}; let missingData = false; + const generationNumbers = []; request.variables.forEach(axes => { const histograms = get(model, { axes }); @@ -243,11 +244,21 @@ function getNotificationData(model, request) { result[axes[0]] = {}; } result[axes[0]][axes[1]] = histograms; + histograms.forEach(hist => generationNumbers.push(hist.annotationInfo.annotationGeneration)); } else { missingData = true; } }); + // Prevent generation mix in result + generationNumbers.sort(); + const generation = generationNumbers.shift(); + if (generationNumbers.length && generation !== generationNumbers.pop()) { + return null; + } + + result['##annotationGeneration##'] = generation; + return missingData ? null : result; }
fix(SelectionProvider): send notification only when histogram 2D has a consistent annotation generation
Kitware_paraviewweb
train
js
9daf55154c72b9fcf0ca674c179cf9a48d544d97
diff --git a/devices.js b/devices.js index <HASH>..<HASH> 100755 --- a/devices.js +++ b/devices.js @@ -4668,6 +4668,18 @@ const devices = [ description: 'ZigBee AC phase-cut dimmer', extend: generic.light_onoff_brightness, }, + { + zigbeeModel: ['ZG2833K4_EU06'], + model: 'SR-ZG9001K4-DIM2', + vendor: 'Sunricher', + description: 'ZigBee double key wall switch', + supports: 'on/off, brightness', + fromZigbee: [ + fz.genOnOff_cmdOn, fz.genOnOff_cmdOff, fz.cmd_move_with_onoff, fz.cmd_stop_with_onoff, fz.generic_battery, + fz.ignore_basic_change, fz.ignore_diagnostic_change, fz.ignore_power_change, + ], + toZigbee: [], + }, // Shenzhen Homa {
add support for SR-ZG<I>K4-DIM2 (#<I>) * add support for SR-ZG<I>K4-DIM2 * ignore diagnostics * add supports * add comma
Koenkk_zigbee-shepherd-converters
train
js
f3edfc8da1fbbc21b447217190de24a02a30f316
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -63,11 +63,13 @@ export function runMiddleware (middleware, context, done) { // Copy the middleware to our own array we can safely .shift() var mw = middleware.slice(0); - const callNext = function () { + const callNext = function (...args) { + // Filter falsey values + var argCount = args.filter(item => !!item).length; // Find the next middleware to call in the stack (if any) var next = mw.shift(); // Attempt to invoke the next middleware - if (arguments.length === 0 && next) { + if (argCount === 0 && next) { try { return next(context, callNext); } catch (err) {
Fixing bug with arguments count including falsey values
HelpfulHuman_Router-Kit
train
js
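A small sketch of the JavaScript gotcha the Router-Kit fix above addresses: a middleware that calls `next(null)` or `next(undefined)` still passes one argument, so checking `arguments.length === 0` wrongly stops the chain. Counting only truthy arguments treats those calls like a plain `next()`. The helper below is illustrative, not the library's API.

```js
function truthyArgCount(...args) {
  return args.filter(item => !!item).length; // same filter as in the diff
}

console.log(truthyArgCount());                  // 0: continue to the next middleware
console.log(truthyArgCount(null));              // 0: previously miscounted as 1
console.log(truthyArgCount(new Error('boom'))); // 1: a real error still short-circuits
```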
36b8d901d7ff8338f6f1cc45e01d1d1e28456da5
diff --git a/bcbio/pipeline/qcsummary.py b/bcbio/pipeline/qcsummary.py index <HASH>..<HASH> 100644 --- a/bcbio/pipeline/qcsummary.py +++ b/bcbio/pipeline/qcsummary.py @@ -319,11 +319,12 @@ def _run_coverage_qc(bam_file, data, out_dir): cov_bed_file = clean_file(dd.get_coverage(data), data, prefix="cov-", simple=True) merged_bed_file = bedutils.merge_overlaps(cov_bed_file, data) target_name = "coverage" - elif dd.get_coverage_interval(data) != "genome": - merged_bed_file = dd.get_variant_regions_merged(data) - target_name = "variant_regions" else: - target_name = None + merged_bed_file = dd.get_variant_regions_merged(data) + if dd.get_coverage_interval(data) != "genome": + target_name = "variant_regions" + else: + target_name = None if target_name: ontarget = sambamba.number_mapped_reads_on_target(
QC: enable avg_coverage calculation for wgs runs
bcbio_bcbio-nextgen
train
py
dd6fa3c6ab88b3472b2d3603df30195fb7000b7d
diff --git a/lib/cf/cli/space/delete.rb b/lib/cf/cli/space/delete.rb index <HASH>..<HASH> 100644 --- a/lib/cf/cli/space/delete.rb +++ b/lib/cf/cli/space/delete.rb @@ -16,7 +16,6 @@ module CF::Space input :really, :type => :boolean, :forget => true, :hidden => true, :default => proc { force? || interact } def delete_space - org = input[:organization] spaces = input[:spaces, org] deleted_current = false
no longer need to fetch org manually
cloudfoundry-attic_cf
train
rb
2e54c0b9f63891a96467847a2e63e456cdb69d11
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -15,6 +15,7 @@ Dir[File.join(File.dirname(__FILE__), "support/shared_contexts/**/*.rb")].each d end RSpec.configure do |config| + config.color = true config.order = "random" config.disable_monkey_patching! config.filter_run_when_matching :focus
Updated RSpec spec helper to enable color output. - This used to be a global setting that is now configured at the project level. - Provides improved transparency for the project.
bkuhlmann_tocer
train
rb
e4da9beb64e9e84116712a2bbe1a2e43f85c0d91
diff --git a/lexicon/providers/base.py b/lexicon/providers/base.py index <HASH>..<HASH> 100644 --- a/lexicon/providers/base.py +++ b/lexicon/providers/base.py @@ -44,7 +44,7 @@ class Provider(object): # pylint: disable=useless-object-inheritance # this key. However there were no automated logic if the Provider is used directly. # So we provide this logic here. if not config.get('provider_name') and not config.get('provider'): - config['provider'] = __name__ # Obviously we use the module name itself. + config['provider_name'] = __name__ # Obviously we use the module name itself. self.config = legacy_config_resolver(config) else: self.config = config
Prefer provider_name for historical reasons
AnalogJ_lexicon
train
py
98bd63cb2734f78e8b4c6566d69ce138552985f5
diff --git a/lib/i18n/backend/base.rb b/lib/i18n/backend/base.rb index <HASH>..<HASH> 100644 --- a/lib/i18n/backend/base.rb +++ b/lib/i18n/backend/base.rb @@ -65,9 +65,13 @@ module I18n end # Returns an array of locales for which translations are available + # ignoring the reserved translation meta data key :i18n. def available_locales init_translations unless initialized? - translations.keys + translations.inject([]) do |locales, (locale, data)| + locales << locale unless data.keys.tap { |keys| keys.delete(:i18n) }.empty? + locales + end end def reload!
change available_translations to ignore a reserved meta translation key :i<I>n
ruby-i18n_i18n
train
rb
b80bf0d0f9eaae1763db995ee82a32632d8e0601
diff --git a/eZ/Publish/Core/REST/common.php b/eZ/Publish/Core/REST/common.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/Core/REST/common.php +++ b/eZ/Publish/Core/REST/common.php @@ -19,8 +19,12 @@ namespace eZ\Publish\Core\REST; // phpunit.xml files. This defines what encoding will be generated and thus send // to the server. $generator = getenv( 'backendEncoding' ) === 'xml' ? - new Common\Output\Generator\Xml() : - new Common\Output\Generator\Json(); + new Common\Output\Generator\Xml( + new Common\Output\Generator\Xml\FieldTypeHashGenerator() + ) : + new Common\Output\Generator\Json( + new Common\Output\Generator\Json\FieldTypeHashGenerator() + ); // The URL Handler is responsible for URL parsing and generation. It will be // used in the output generators and in some parsing handlers.
Fixed: Creation of generators in common.php.
ezsystems_ezpublish-kernel
train
php
986f7e42a2fc56afd8a0fee644244a7df37529ed
diff --git a/lib/gollum-lib/markups.rb b/lib/gollum-lib/markups.rb index <HASH>..<HASH> 100644 --- a/lib/gollum-lib/markups.rb +++ b/lib/gollum-lib/markups.rb @@ -58,7 +58,7 @@ module Gollum if gem_exists?('pandoc-ruby') GitHub::Markup::Markdown::MARKDOWN_GEMS.clear GitHub::Markup::Markdown::MARKDOWN_GEMS['pandoc-ruby'] = proc { |content| - PandocRuby.convert(content, :from => 'markdown-tex_math_dollars-raw_tex', :to => :html, :filter => 'pandoc-citeproc') + PandocRuby.convert(content, '--citeproc', :from => 'markdown-tex_math_dollars-raw_tex', :to => :html) } else GitHub::Markup::Markdown::MARKDOWN_GEMS['kramdown'] = proc { |content|
Update call to pandoc. Resolves #<I>
gollum_gollum-lib
train
rb
b9d5aca5001f8fe77e86f1bd78d4ee813ff76ee7
diff --git a/lib/mongoid/criteria/scrollable.rb b/lib/mongoid/criteria/scrollable.rb index <HASH>..<HASH> 100644 --- a/lib/mongoid/criteria/scrollable.rb +++ b/lib/mongoid/criteria/scrollable.rb @@ -5,7 +5,6 @@ module Mongoid raise_multiple_sort_fields_error if multiple_sort_fields? criteria = dup criteria.merge!(default_sort) if no_sort_option? - scroll_direction = scroll_direction(criteria) # scroll cursor from the parameter, with value and tiebreak_id cursor_options = build_cursor_options(criteria) cursor = cursor.is_a?(Mongoid::Scroll::Cursor) ? cursor : Mongoid::Scroll::Cursor.new(cursor, cursor_options) @@ -13,7 +12,7 @@ module Mongoid cursor_criteria.selector = { '$and' => [criteria.selector, cursor.criteria] } # scroll if block_given? - cursor_criteria.order_by(_id: scroll_direction).each do |record| + cursor_criteria.order_by(_id: scroll_direction(criteria)).each do |record| yield record, Mongoid::Scroll::Cursor.from_record(record, cursor_options) end else
Remove scroll_direction named variable in favour of method call
mongoid_mongoid-scroll
train
rb
853a5fb6ffcc6a3ebad116072d2e6f82e40651f0
diff --git a/functions/timber-post-getter.php b/functions/timber-post-getter.php index <HASH>..<HASH> 100644 --- a/functions/timber-post-getter.php +++ b/functions/timber-post-getter.php @@ -1,6 +1,6 @@ <?php -class TimberPostGetter +class TimberPostGetter { /** @@ -42,10 +42,9 @@ class TimberPostGetter return new TimberPostsCollection( $query, $PostClass ); } else { // We have a query (of sorts) to work with - $tqi = new TimberQueryIterator( $query, $PostClass ); + $tqi = new TimberQueryIterator( $query, $PostClass ); return $tqi; } - return $posts; } public static function get_pids($query){
Removed unreachable return statement in Timber::query_posts
timber_timber
train
php
0c6b734cf833b7fc222f4b1912134e63bad10d81
diff --git a/bin/cmd.js b/bin/cmd.js index <HASH>..<HASH> 100755 --- a/bin/cmd.js +++ b/bin/cmd.js @@ -496,17 +496,17 @@ function runDownload (torrentId) { if (process.platform === 'win32') { openVLCWin32(vlcCmd) } else { - openPlayer(`${vlcCmd} ${href} ${VLC_ARGS}`) + openPlayer(`${vlcCmd} "${href}" ${VLC_ARGS}`) } }) } else if (argv.iina) { - openIINA(`${IINA_EXEC} ${href}`, `iina://weblink?url=${href}`) + openIINA(`${IINA_EXEC} "${href}"`, `iina://weblink?url=${href}`) } else if (argv.mplayer) { - openPlayer(`${MPLAYER_EXEC} ${href}`) + openPlayer(`${MPLAYER_EXEC} "${href}"`) } else if (argv.mpv) { - openPlayer(`${MPV_EXEC} ${href}`) + openPlayer(`${MPV_EXEC} "${href}"`) } else if (argv.omx) { - openPlayer(`${OMX_EXEC} ${href}`) + openPlayer(`${OMX_EXEC} "${href}"`) } function openPlayer (cmd) {
Bug fix: fixed error caused by brackets in the torrent file name when passing the file to a video player.
webtorrent_webtorrent-cli
train
js
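A sketch of why the webtorrent-cli fix above wraps `href` in quotes: without them, spaces or brackets in a file name split the shell command into several arguments. The path and player binary below are hypothetical.

```js
const href = '/tmp/My Movie [1080p].mkv'; // hypothetical file name with spaces and brackets
const MPV_EXEC = 'mpv';                   // assumed player command

console.log(`${MPV_EXEC} ${href}`);   // mpv /tmp/My Movie [1080p].mkv   -> parsed as 3 arguments
console.log(`${MPV_EXEC} "${href}"`); // mpv "/tmp/My Movie [1080p].mkv" -> one quoted argument
```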
8a35d103c59c221512ec011715da11faaedfe768
diff --git a/tests/Unit/Console/SchemaConfigMakeCommandTest.php b/tests/Unit/Console/SchemaConfigMakeCommandTest.php index <HASH>..<HASH> 100644 --- a/tests/Unit/Console/SchemaConfigMakeCommandTest.php +++ b/tests/Unit/Console/SchemaConfigMakeCommandTest.php @@ -17,7 +17,7 @@ class SchemaConfigMakeCommandTest extends TestCase public function testCommand( string $inputName, string $expectedFilename, - string $expectedClassDefinition, + string $expectedClassDefinition ): void { $this->assertMakeCommand( 'Schema',
Make code compatible with PHP < 8
rebing_graphql-laravel
train
php
4f75f6fc028a38f5954189ab77403b35c4b8a671
diff --git a/code/dataobjects/CdnImage.php b/code/dataobjects/CdnImage.php index <HASH>..<HASH> 100644 --- a/code/dataobjects/CdnImage.php +++ b/code/dataobjects/CdnImage.php @@ -147,7 +147,7 @@ class CdnImage extends Image { parent::onBeforeWrite(); $changed = $this->getChangedFields(false, DataObject::CHANGE_VALUE); - if (isset($changed['Name']) || isset($changed['Title'])) { + if (isset($changed['Name']) || isset($changed['Title']) || isset($changed['ParentID'])) { // we want to store the new sampled values $this->deleteResamplings(); }
fix(CDNImage) Ensure moved images have their resamplings reset and rebuilt
symbiote_silverstripe-cdncontent
train
php
d96e01322cce0c898a216f4f3d04a9a0c543450b
diff --git a/runtime/errors.go b/runtime/errors.go index <HASH>..<HASH> 100644 --- a/runtime/errors.go +++ b/runtime/errors.go @@ -65,9 +65,9 @@ var ( ) type errorBody struct { - Error string `protobuf:"bytes,1,name=error" json:"error"` - Code int32 `protobuf:"varint,2,name=code" json:"code"` - Details []any.Any `protobuf:"bytes,3,name=details" json:"details"` + Error string `protobuf:"bytes,1,name=error" json:"error"` + Code int32 `protobuf:"varint,2,name=code" json:"code"` + Details []*any.Any `protobuf:"bytes,3,rep,name=details" json:"details,omitempty"` } // Make this also conform to proto.Message for builtin JSONPb Marshaler @@ -103,7 +103,7 @@ func DefaultHTTPError(ctx context.Context, mux *ServeMux, marshaler Marshaler, w if err != nil { grpclog.Printf("Failed to marshal any: %v", err) } else { - body.Details = append(body.Details, *a) + body.Details = append(body.Details, a) } } }
fix noenc error by fixing Details error field As advised by @itizir in the discussion of #<I>. However, I've gone with the minimal change to make the error go away; I haven't introduced a proper, generated protobuf definition. Also, I've noticed our error body message there looks suspiciously like Status[1], except for the field name "Error" vs "Message". Fixes #<I>. [1]: <URL>
grpc-ecosystem_grpc-gateway
train
go
ff32eb67559383aa7ce9d71e70ab5291736b3d31
diff --git a/pyinfra/version.py b/pyinfra/version.py index <HASH>..<HASH> 100644 --- a/pyinfra/version.py +++ b/pyinfra/version.py @@ -2,4 +2,4 @@ # File: pyinfra/version.py # Desc: version file -__version__ = '0.9.6' +__version__ = '0.9.7'
Version bump for <I>.
Fizzadar_pyinfra
train
py
de2c719d65764c9b024f07208c8e4fcf02f5a27e
diff --git a/nifstd/nifstd_tools/hbp_cells.py b/nifstd/nifstd_tools/hbp_cells.py index <HASH>..<HASH> 100755 --- a/nifstd/nifstd_tools/hbp_cells.py +++ b/nifstd/nifstd_tools/hbp_cells.py @@ -7,12 +7,12 @@ from datetime import date import rdflib from rdflib.extras import infixowl from pyontutils.core import makeGraph +from pyontutils.utils import working_dir from pyontutils.scigraph import Vocabulary from pyontutils.namespaces import makePrefixes from IPython import embed -current_file = Path(__file__).absolute() -gitf = current_file.parent.parent.parent +gitf = working_dir.parent v = Vocabulary()
hbp_cells fixed link to git folder
tgbugs_pyontutils
train
py
ac54da3dacb70a8de9c6c0054ab016ea8110f3a8
diff --git a/libraries/joomla/base/adapter.php b/libraries/joomla/base/adapter.php index <HASH>..<HASH> 100644 --- a/libraries/joomla/base/adapter.php +++ b/libraries/joomla/base/adapter.php @@ -55,7 +55,7 @@ class JAdapter extends JObject /** * Database Connector Object * - * @var object + * @var JDatabase * @since 11.1 */ protected $_db; @@ -81,7 +81,7 @@ class JAdapter extends JObject /** * Get the database connector object * - * @return object Database connector object + * @return JDatabase Database connector object * * @since 11.1 */ diff --git a/libraries/joomla/database/table.php b/libraries/joomla/database/table.php index <HASH>..<HASH> 100644 --- a/libraries/joomla/database/table.php +++ b/libraries/joomla/database/table.php @@ -322,7 +322,7 @@ abstract class JTable extends JObject /** * Method to get the JDatabase connector object. * - * @return object The internal database connector object. + * @return JDatabase The internal database connector object. * * @link http://docs.joomla.org/JTable/getDBO * @since 11.1
Proper return types for DB objects and getDbo
joomla_joomla-framework
train
php,php