hash (stringlengths 40–40) | diff (stringlengths 131–26.7k) | message (stringlengths 7–694) | project (stringlengths 5–67) | split (stringclasses: 1 value) | diff_languages (stringlengths 2–24)
---|---|---|---|---|---
3964f77d6787c8f94bf831050c2fc41b461fbc73 | diff --git a/lib/modules/apostrophe-pages/public/js/reorganize.js b/lib/modules/apostrophe-pages/public/js/reorganize.js
index <HASH>..<HASH> 100644
--- a/lib/modules/apostrophe-pages/public/js/reorganize.js
+++ b/lib/modules/apostrophe-pages/public/js/reorganize.js
@@ -241,14 +241,12 @@ apos.define('apostrophe-pages-reorganize', {
// Refuse requests to move something before the
// home page, or after it (as a peer). Inside it is fine
var target = e.move_info.target_node;
- if (!target.parent.parent) {
- if (e.move_info.position !== 'inside') {
+ if ((!target.parent.parent) && (e.move_info.position !== 'inside')) {
return;
}
}
// You also can't move something after the conventional trashcan
- if ((target.type === 'trash') && (!target.virtualTrashcan)) {
- if (e.move_info.position === 'after') {
+ if ((target.type === 'trash') && (!target.virtualTrashcan) && (e.move_info.position === 'after')) {
return;
}
} | single if rather than nested | apostrophecms_apostrophe | train | js |
678d4e528691550d6ce79ee806eaf490104d3b76 | diff --git a/src/Psalm/Internal/Analyzer/Statements/Block/LoopAnalyzer.php b/src/Psalm/Internal/Analyzer/Statements/Block/LoopAnalyzer.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Internal/Analyzer/Statements/Block/LoopAnalyzer.php
+++ b/src/Psalm/Internal/Analyzer/Statements/Block/LoopAnalyzer.php
@@ -462,8 +462,6 @@ class LoopAnalyzer
foreach ($loop_scope->possibly_unreferenced_vars as $var_id => $locations) {
if (isset($inner_context->unreferenced_vars[$var_id])) {
$inner_context->unreferenced_vars[$var_id] += $locations;
- } else {
- $inner_context->unreferenced_vars[$var_id] = $locations;
}
} | Fix #<I> - improve loop-based handling of vars after continue | vimeo_psalm | train | php |
475eadb956986e95d52e8a0ca5256a8831ba7bc8 | diff --git a/util/index.go b/util/index.go
index <HASH>..<HASH> 100644
--- a/util/index.go
+++ b/util/index.go
@@ -43,9 +43,6 @@ func (idx *Index) Delete(key string) error {
idx.lock.Lock()
defer idx.lock.Unlock()
- if _, exists := idx.data[key]; !exists {
- return fmt.Errorf("BUG: About to remove non-existed key %v from index", key)
- }
delete(idx.data, key)
return nil
}
diff --git a/util/util_test.go b/util/util_test.go
index <HASH>..<HASH> 100644
--- a/util/util_test.go
+++ b/util/util_test.go
@@ -227,7 +227,7 @@ func (s *TestSuite) TestIndex(c *C) {
c.Assert(err, ErrorMatches, "BUG: Invalid empty index key")
err = index.Delete("keyx")
- c.Assert(err, ErrorMatches, "BUG: About to remove non-existed key.*")
+ c.Assert(err, IsNil)
err = index.Delete("key1")
c.Assert(err, IsNil) | Don't throw error on index cleanup | rancher_convoy | train | go,go |
add539ee75c0b0b434ebd0e3120e25d0c626342c | diff --git a/tasks/fastly.js b/tasks/fastly.js
index <HASH>..<HASH> 100644
--- a/tasks/fastly.js
+++ b/tasks/fastly.js
@@ -9,7 +9,8 @@
'use strict';
var fastly = require('fastly')
- , async = require('async');
+ , async = require('async')
+ , url = require('url');
module.exports = function(grunt) {
// Please see the Grunt documentation for more information regarding task
@@ -55,9 +56,11 @@ module.exports = function(grunt) {
this.data.urls = [];
}
- async.eachLimit(this.data.urls, options.concurrentPurges, function(url, next) {
- grunt.log.write('Purging "'+options.host+'/'+url+'"...');
- fastly.purge(options.host, url, function(err) {
+ async.eachLimit(this.data.urls, options.concurrentPurges, function(uriPath, next) {
+ var uri = url.format({host: options.host, pathname: uriPath}).substr(2);
+
+ grunt.log.write('Purging "'+uri+'"...');
+ fastly.purge(options.host, uriPath, function(err) {
if (err) grunt.log.error();
else grunt.log.ok(); | fixed bug with displaying purge url in console logs | coen-hyde_grunt-fastly | train | js |
7338eaea21c78c2d8d3a9c0200132ca1eb95d5a2 | diff --git a/spec/gherkin/parser/parser_spec.rb b/spec/gherkin/parser/parser_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/gherkin/parser/parser_spec.rb
+++ b/spec/gherkin/parser/parser_spec.rb
@@ -3,11 +3,13 @@ require 'spec_helper'
module Gherkin
module Parser
describe Parser do
- it "should raise when feature doesn't parse" do
- p = Parser.new(mock('formatter').as_null_object)
- lambda do
- p.parse("Feature: f\nFeature: f", __FILE__, __LINE__-1)
- end.should raise_error(/Parse error at/)
+ unless defined?(JRUBY_VERSION)
+ it "should raise when feature doesn't parse" do
+ p = Parser.new(mock('formatter').as_null_object)
+ lambda do
+ p.parse("Feature: f\nFeature: f", __FILE__, __LINE__-1)
+ end.should raise_error(/Parse error at/)
+ end
end
end
end | RSpec mocks can't be coerced into Java types | cucumber-attic_gherkin2 | train | rb |
b372e9514757cc8def4aa2f231b780d08951931c | diff --git a/sievelib/managesieve.py b/sievelib/managesieve.py
index <HASH>..<HASH> 100644
--- a/sievelib/managesieve.py
+++ b/sievelib/managesieve.py
@@ -243,9 +243,9 @@ class Client(object):
if len(args):
tosend += " " + " ".join(self.__prepare_args(args))
self.__dprint("Command: %s" % tosend)
- self.sock.send("%s%s" % (tosend, CRLF))
+ self.sock.sendall("%s%s" % (tosend, CRLF))
for l in extralines:
- self.sock.send("%s%s" % (l, CRLF))
+ self.sock.sendall("%s%s" % (l, CRLF))
code, data, content = self.__read_response(nblines)
if withcontent: | Used sendall() instead of send() to make sure all bytes are sent. | tonioo_sievelib | train | py |
f63ea91651e4c15e48097c83e6781a6a0500cf4e | diff --git a/library/Imbo/FrontController.php b/library/Imbo/FrontController.php
index <HASH>..<HASH> 100644
--- a/library/Imbo/FrontController.php
+++ b/library/Imbo/FrontController.php
@@ -187,6 +187,10 @@ class FrontController {
// implemented
$response->getHeaders()->set('Allow', implode(', ', $resource->getAllowedMethods()));
+ if ($request->getType() === RequestInterface::RESOURCE_IMAGE) {
+ $response->getHeaders()->set('X-Imbo-ImageIdentifier', $request->getImageIdentifier());
+ }
+
// If we have an unsafe request, we need to make sure that the request is valid
if ($request->isUnsafe()) {
$this->auth($request); | Add the image identifier as a custom response header on all image resource requests | imbo_imbo | train | php |
f785641ffae99923a495498a9676b3925ff6da2b | diff --git a/src/jukeboxcore/gui/widgetdelegate.py b/src/jukeboxcore/gui/widgetdelegate.py
index <HASH>..<HASH> 100644
--- a/src/jukeboxcore/gui/widgetdelegate.py
+++ b/src/jukeboxcore/gui/widgetdelegate.py
@@ -32,6 +32,7 @@ class WidgetDelegate(QtGui.QStyledItemDelegate):
super(WidgetDelegate, self).__init__(parent)
self._widget = self.create_widget(parent)
self._widget.setVisible(False)
+ self._widget.setAutoFillBackground(True)
self._edit_widgets = {}
self.keep_editor_size = True
"""If True, resize the editor at least to its size Hint size, or if the section allows is, bigger.""" | AutoFillBackground for delegates | JukeboxPipeline_jukebox-core | train | py |
635405c4a7c13d93c9177d4f79c00dba3026c37d | diff --git a/autolab_core/points.py b/autolab_core/points.py
index <HASH>..<HASH> 100644
--- a/autolab_core/points.py
+++ b/autolab_core/points.py
@@ -899,6 +899,16 @@ class NormalCloud(BagOfVectors):
points_of_interest = np.where(np.linalg.norm(self._data, axis=0) != 0.0)[0]
self._data = self._data[:, points_of_interest]
+ def remove_nan_normals(self):
+ """Removes normal vectors with nan magnitude.
+
+ Note
+ ----
+ This returns nothing and updates the NormalCloud in-place.
+ """
+ points_of_interest = np.where(np.isfinite(np.linalg.norm(self._data, axis=0)))[0]
+ self._data = self._data[:, points_of_interest]
+
@staticmethod
def open(filename, frame='unspecified'):
"""Create a NormalCloud from data saved in a file. | Added a function to remove NaNs from point clouds | BerkeleyAutomation_autolab_core | train | py |
3c7d47d8406bdb8b3539dd74cdfbd1e609ad121e | diff --git a/fwdpy11/_types/demography_debugger.py b/fwdpy11/_types/demography_debugger.py
index <HASH>..<HASH> 100644
--- a/fwdpy11/_types/demography_debugger.py
+++ b/fwdpy11/_types/demography_debugger.py
@@ -17,6 +17,7 @@
# along with fwdpy11. If not, see <http://www.gnu.org/licenses/>.
#
import collections
+import copy
import typing
import warnings
@@ -28,9 +29,13 @@ import fwdpy11
def _create_event_list(o):
try:
- return fwdpy11.DiscreteDemography(**o.model.asdict())
+ d = o.model.asdict()
+ dc = copy.deepcopy(d)
+ return fwdpy11.DiscreteDemography(**dc)
except AttributeError:
- return fwdpy11.DiscreteDemography(**o.asdict())
+ d = o.asdict()
+ dc = copy.deepcopy(d)
+ return fwdpy11.DiscreteDemography(**dc)
def _create_initial_deme_sizes(o): | DemographyDebugger now deep copies the initial model. This prevents
side effects from modifying any arrays, such as the migration
matrix. | molpopgen_fwdpy11 | train | py |
f239e1f1663cbea52beaf5a7e1636a0db4c7c7ed | diff --git a/lib/mongoid/tree/ordering.rb b/lib/mongoid/tree/ordering.rb
index <HASH>..<HASH> 100644
--- a/lib/mongoid/tree/ordering.rb
+++ b/lib/mongoid/tree/ordering.rb
@@ -123,9 +123,7 @@ module Mongoid
#
# @return [undefined]
def move_up
- return if at_top?
- siblings.where(:position => self.position - 1).first.inc(:position, 1)
- inc(:position, -1)
+ switch_with(-1) unless at_top?
end
##
@@ -133,9 +131,7 @@ module Mongoid
#
# @return [undefined]
def move_down
- return if at_bottom?
- siblings.where(:position => self.position + 1).first.inc(:position, -1)
- inc(:position, 1)
+ switch_with(1) unless at_bottom?
end
##
@@ -196,6 +192,11 @@ module Mongoid
private
+ def switch_with(offset)
+ siblings.where(:position => self.position + offset).first.inc(:position, -offset)
+ inc(:position, offset)
+ end
+
def move_lower_siblings_up
lower_siblings.each { |s| s.inc(:position, -1) }
end | Removes code duplication in Mongoid::Tree::Ordering#move_up and Mongoid::Tree::Ordering#move_down | benedikt_mongoid-tree | train | rb |
6d0c484330a704ad8a70574bb0f20d043a69f95d | diff --git a/assess_container_networking.py b/assess_container_networking.py
index <HASH>..<HASH> 100755
--- a/assess_container_networking.py
+++ b/assess_container_networking.py
@@ -384,11 +384,17 @@ def assess_container_networking(client, types):
# Reboot all hosts apart from machine 0 because we use machine 0 to jump
# through for some hosts.
log.info("Instrumenting reboot of all machines.")
- for host in hosts[1:]:
- ssh(client, host, 'sudo shutdown -r')
-
- # Finally reboot machine 0
- ssh(client, hosts[0], 'sudo shutdown -r')
+ try:
+ for host in hosts[1:]:
+ ssh(client, host, 'sudo shutdown -r now')
+
+ # Finally reboot machine 0
+ ssh(client, hosts[0], 'sudo shutdown -r now')
+ except subprocess.CalledProcessError as e:
+ logging.info(
+ "Error running shutdown:\nstdout: %s\nstderr: %s",
+ e.output, getattr(e, 'stderr', None))
+ raise
# Wait for the state server to shut down. This prevents us from calling
# wait_for_started before machine 0 has shut down, which can cause us | added 'now' argument in ssh | juju_juju | train | py |
1153bf1aeeb1f38c1c305c9b2898ae8bb55948d1 | diff --git a/lib/arjdbc/informix.rb b/lib/arjdbc/informix.rb
index <HASH>..<HASH> 100644
--- a/lib/arjdbc/informix.rb
+++ b/lib/arjdbc/informix.rb
@@ -2,3 +2,4 @@ require 'arjdbc'
ArJdbc.load_java_part :Informix
require 'arjdbc/informix/adapter'
require 'arjdbc/informix/connection_methods'
+ArJdbc.warn_unsupported_adapter 'informix', [4, 2] # warns on AR >= 4.2
\ No newline at end of file
diff --git a/lib/arjdbc/mimer.rb b/lib/arjdbc/mimer.rb
index <HASH>..<HASH> 100644
--- a/lib/arjdbc/mimer.rb
+++ b/lib/arjdbc/mimer.rb
@@ -1,2 +1,3 @@
require 'arjdbc'
require 'arjdbc/mimer/adapter'
+ArJdbc.warn_unsupported_adapter 'mimer', [4, 2] # warns on AR >= 4.2
\ No newline at end of file | warn on mimer and informix adapters as unsupported on <I> | jruby_activerecord-jdbc-adapter | train | rb,rb |
c1721b61efd3a30bdae6d62ea0095f45e51e7832 | diff --git a/client.go b/client.go
index <HASH>..<HASH> 100644
--- a/client.go
+++ b/client.go
@@ -586,6 +586,9 @@ func (me *Client) runConnection(sock net.Conn, torrent *torrent, discovery peerS
if !ok {
return
}
+ if hsRes.peerID == me.peerID {
+ return
+ }
torrent = me.torrent(hsRes.InfoHash)
if torrent == nil {
return | Prevent clients from connecting to other clients with the same ID | anacrolix_torrent | train | go |
a73ab264191ed3d8c75ba06ac650e1e1523d9201 | diff --git a/src/vmshepherd/http/rpc_api.py b/src/vmshepherd/http/rpc_api.py
index <HASH>..<HASH> 100644
--- a/src/vmshepherd/http/rpc_api.py
+++ b/src/vmshepherd/http/rpc_api.py
@@ -117,7 +117,7 @@ class RpcApi(handler.JSONRPCView):
try:
preset = vmshepherd.preset_manager.get_preset(preset_name)
except PresetNotFound as ex:
- logging.exception(ex)
+ logging.error(ex)
raise
# check in cache
@@ -129,7 +129,7 @@ class RpcApi(handler.JSONRPCView):
try:
vm_info = await preset.iaas.get_vm(vm_id)
except VmNotFound as ex:
- logging.exception(ex)
+ logging.error(ex)
raise
logging.info('IaaS verification ok') | use logging error insted of exception for clean logs | DreamLab_VmShepherd | train | py |
5556fcc309350e92e2054f230ff47c3e99495e4a | diff --git a/mode/javascript/javascript.js b/mode/javascript/javascript.js
index <HASH>..<HASH> 100644
--- a/mode/javascript/javascript.js
+++ b/mode/javascript/javascript.js
@@ -655,6 +655,7 @@ CodeMirror.defineMode("javascript", function(config, parserConfig) {
if (type == "variable") cx.marked = "property";
if (type == "spread") return cont(pattern);
if (type == "}") return pass();
+ if (type == "[") return cont(expression, expect(']'), expect(':'), proppattern);
return cont(expect(":"), pattern, maybeAssign);
}
function eltpattern() {
diff --git a/mode/javascript/test.js b/mode/javascript/test.js
index <HASH>..<HASH> 100644
--- a/mode/javascript/test.js
+++ b/mode/javascript/test.js
@@ -226,6 +226,12 @@
" [keyword return] [variable-2 x];",
"}");
+ MT(
+ "param_destructuring",
+ "[keyword function] [def foo]([def x] [operator =] [string-2 `foo${][number 10][string-2 }bar`]) {",
+ " [keyword return] [variable-2 x];",
+ "}");
+
MT("new_target",
"[keyword function] [def F]([def target]) {",
" [keyword if] ([variable-2 target] [operator &&] [keyword new].[keyword target].[property name]) {", | Permit quoted names for object destructuring in JavaScript syntax | codemirror_CodeMirror | train | js,js |
b6023de7dc61249b58140dfdeff460517c21b884 | diff --git a/test/tenetobids/tnet_test.py b/test/tenetobids/tnet_test.py
index <HASH>..<HASH> 100644
--- a/test/tenetobids/tnet_test.py
+++ b/test/tenetobids/tnet_test.py
@@ -178,10 +178,6 @@ def test_tnet_scrubbing_and_spline():
# Make sure there is a difference
if not np.sum(dat_scrub != dat_orig):
raise AssertionError()
- # Show that the difference between the original data at scrubbed time point is larger in data_orig
- if not np.sum(np.abs(np.diff(dat_orig[0]))-np.abs(np.diff(dat_scrub[0]))) > 0:
- raise AssertionError()
- # Future tests: test that the cubic spline is correct
def test_tnet_set_bad_files():
@@ -190,7 +186,7 @@ def test_tnet_set_bad_files():
# Set the confound pipeline in fmriprep
tnet.load_data('parcellation')
tnet.set_confound_pipeline('fmriprep')
- tnet.set_exclusion_file('confound2', '>0.5')
+ tnet.set_exclusion_file('confound2', '>0')
if not len(tnet.bad_files) == 1:
raise AssertionError()
if not tnet.bad_files[0] == tnet.BIDS_dir + 'derivatives/' + tnet.pipeline + \ | updating tests to new confounds file | wiheto_teneto | train | py |
55748f301c4beec954afb418eaa6d4161275ed9e | diff --git a/src/NodeCompiler/CompileNodeToValue.php b/src/NodeCompiler/CompileNodeToValue.php
index <HASH>..<HASH> 100644
--- a/src/NodeCompiler/CompileNodeToValue.php
+++ b/src/NodeCompiler/CompileNodeToValue.php
@@ -124,7 +124,7 @@ class CompileNodeToValue
$classInfo = null;
if ('self' === $className || 'static' === $className) {
- $classInfo = $context->getSelf();
+ $classInfo = $this->getConstantDeclaringClass($node->name, $context);
}
if (null === $classInfo) {
@@ -247,4 +247,20 @@ class CompileNodeToValue
throw new Exception\UnableToCompileNode('Unable to compile binary operator: ' . get_class($node));
}
+
+ /**
+ * @param string $constantName
+ * @param \Roave\BetterReflection\NodeCompiler\CompilerContext $context
+ * @return \Roave\BetterReflection\Reflection\ReflectionClass|null
+ */
+ private function getConstantDeclaringClass(string $constantName, CompilerContext $context)
+ {
+ $classInfo = $context->getSelf();
+ while (!$classInfo->hasConstant($constantName) && $classInfo->getParentClass() !== null) {
+ $classInfo = $classInfo->getParentClass();
+ }
+
+ return $classInfo;
+ }
+
} | Fixed class constant fetch when constant is declared in parent | Roave_BetterReflection | train | php |
b60b01c4fede7d6e8167e3a057c61f249a0f9a8b | diff --git a/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/config/production.py b/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/config/production.py
index <HASH>..<HASH> 100644
--- a/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/config/production.py
+++ b/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/config/production.py
@@ -80,9 +80,8 @@ AWS_EXPIRY = 60 * 60 * 24 * 7
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
- 'Cache-Control': str.encode(
- 'max-age=%d, s-maxage=%d, must-revalidate' % (
- AWS_EXPIREY, AWS_EXPIREY))
+ 'Cache-Control': str.encode('max-age=%d, s-maxage=%d, must-revalidate' % (
+ AWS_EXPIRY, AWS_EXPIRY))
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url | fix(settings/production): fix typo s/AWS_EXPIREY/AWS_EXPIRY/g | pydanny_cookiecutter-django | train | py |
f6230d771cf71d9a53be090ba87b3e8f669feeb9 | diff --git a/addon/components/side-menu.js b/addon/components/side-menu.js
index <HASH>..<HASH> 100644
--- a/addon/components/side-menu.js
+++ b/addon/components/side-menu.js
@@ -83,9 +83,12 @@ export default Ember.Component.extend({
const rootNode = document.querySelector("body");
const onTouchStart = Ember.run.bind(this, this.rootNodeTouch);
- this.set("rootNode", rootNode);
- this.set("onTouchStart", onTouchStart);
rootNode.addEventListener("touchstart", onTouchStart);
+
+ Ember.run.schedule("afterRender", () => {
+ this.set("rootNode", rootNode);
+ this.set("onTouchStart", onTouchStart);
+ });
},
removeEventListeners() { | moving setting component properties to afterRender | tsubik_ember-side-menu | train | js |
7e69d4dda089139057733ac9d757ded72508a731 | diff --git a/moa/src/main/java/com/github/javacliparser/gui/ListOptionEditComponent.java b/moa/src/main/java/com/github/javacliparser/gui/ListOptionEditComponent.java
index <HASH>..<HASH> 100644
--- a/moa/src/main/java/com/github/javacliparser/gui/ListOptionEditComponent.java
+++ b/moa/src/main/java/com/github/javacliparser/gui/ListOptionEditComponent.java
@@ -21,8 +21,6 @@ package com.github.javacliparser.gui;
import com.github.javacliparser.Option;
-import javax.swing.*;
-
/**
* An OptionEditComponent that lets the user edit a list option.
*
@@ -31,8 +29,6 @@ import javax.swing.*;
*/
public class ListOptionEditComponent extends StringOptionEditComponent {
- protected StringOptionEditComponent component;
-
private static final long serialVersionUID = 1L;
public ListOptionEditComponent(Option option) { | Remove unused import and variable
Commit b<I>d2a fixed the ListOptionEditComponent, but missed to remove
an unused import and a variable. | Waikato_moa | train | java |
ac98d3b3f67b7477e6e4f82897ee7caf190535ea | diff --git a/tests/handler-helper.tests.js b/tests/handler-helper.tests.js
index <HASH>..<HASH> 100644
--- a/tests/handler-helper.tests.js
+++ b/tests/handler-helper.tests.js
@@ -959,7 +959,6 @@ test('handler-helper.findHandler', function(t) {
//<editor-fold desc="Assert">
return promise.then(function (result) {
// Log.error(reply.args[0]);
- debugger;
t.deepEqual(result, "TEST1", "returns single result");
})
//</editor-fold> | Remove debugger statement left in find tests | JKHeadley_rest-hapi | train | js |
97cec8604a9c3091acd0007d8244113c05398412 | diff --git a/src/core/core.helpers.js b/src/core/core.helpers.js
index <HASH>..<HASH> 100644
--- a/src/core/core.helpers.js
+++ b/src/core/core.helpers.js
@@ -758,7 +758,7 @@
// Store the device pixel ratio so that we can go backwards in `destroy`.
// The devicePixelRatio changes with zoom, so there are no guarantees that it is the same
// when destroy is called
- chart.originalDevicePixelRatio = window.devicePixelRatio;
+ chart.originalDevicePixelRatio = chart.originalDevicePixelRatio || window.devicePixelRatio;
}
},
//-- Canvas methods | Only store the original device context ratio once | chartjs_Chart.js | train | js |
50d80405a90c9c79f2a00b2c22cc6c1fdc333012 | diff --git a/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py b/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py
index <HASH>..<HASH> 100644
--- a/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py
+++ b/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py
@@ -27,15 +27,15 @@ revision = "18532d70ab98"
down_revision = "3fbbc6e8d654"
from alembic import op
+from sqlalchemy.dialects.mysql.base import MySQLDialect
def upgrade():
- try:
+ bind = op.get_bind()
+ if isinstance(bind.dialect, MySQLDialect):
# index only exists in mysql db
with op.get_context().autocommit_block():
op.drop_constraint("table_name", "tables", type_="unique")
- except Exception as ex:
- print(ex)
def downgrade(): | fix: alembic migration error msg trying to delete constraint on tables (#<I>)
* fix: alembic migration fails by deleting non existent constraint on tables
* Revert "fix: alembic migration fails by deleting non existent constraint on tables"
This reverts commit 3a<I>b<I>f4bf<I>c3de2d<I>e<I>bd3d<I>f.
* mantain migration but just for MySQL and add downgrade procedure
* skip the downgrade | apache_incubator-superset | train | py |
8c760d0cf46e13843bc5ece49f7ffbf2d8917c41 | diff --git a/cli/git.go b/cli/git.go
index <HASH>..<HASH> 100644
--- a/cli/git.go
+++ b/cli/git.go
@@ -4,7 +4,7 @@ import (
"bufio"
"bytes"
"fmt"
- "io/ioutil"
+ "io"
"os"
"os/exec"
"strings"
@@ -179,9 +179,16 @@ func runGitCredentials(args *docopt.Args) error {
return nil
}
- detailBytes, _ := ioutil.ReadAll(os.Stdin)
+ r := bufio.NewReader(os.Stdin)
details := make(map[string]string)
- for _, l := range bytes.Split(detailBytes, []byte("\n")) {
+ for {
+ l, _, err := r.ReadLine()
+ if err != nil && err != io.EOF {
+ return err
+ }
+ if len(l) == 0 {
+ break
+ }
kv := bytes.SplitN(l, []byte("="), 2)
if len(kv) == 2 {
details[string(kv[0])] = string(kv[1]) | cli: Treat empty line as EOF in git credential helper
This is correct, and may fix reported hangs with some versions of
git. | flynn_flynn | train | go |
347f57468ebfcb8bb937112b69fa9e818d5fe429 | diff --git a/src/sap.m/src/sap/m/Select.js b/src/sap.m/src/sap/m/Select.js
index <HASH>..<HASH> 100644
--- a/src/sap.m/src/sap/m/Select.js
+++ b/src/sap.m/src/sap/m/Select.js
@@ -531,7 +531,7 @@ sap.ui.define(['jquery.sap.global', './Bar', './Dialog', './InputBase', './Popov
oPicker = new Popover({
showArrow: false,
showHeader: false,
- placement: sap.m.PlacementType.Vertical,
+ placement: sap.m.PlacementType.VerticalPreferredBottom,
offsetX: 0,
offsetY: 0,
initialFocus: this, | [INTERNAL] sap.m.Select: improve dropdown list vertical placement
Change-Id: If1d<I>e3dc<I>aa5a<I>f9a7d<I>e<I> | SAP_openui5 | train | js |
f6c9a0591c06dfd246a4e5499fed8812ccc5293c | diff --git a/main.py b/main.py
index <HASH>..<HASH> 100644
--- a/main.py
+++ b/main.py
@@ -10,8 +10,12 @@ import sys
if (__name__ == "__main__"):
RUN_FROM_MAIN = False
- HOST = '192.168.200.83'
+
+ HOST = '127.0.0.1'
PORT = 7777
+
+ if (len(sys.argv) > 1):
+ HOST = sys.argv[1]
ADDR = (HOST, PORT)
protocol = 102
@@ -54,7 +58,7 @@ if (__name__ == "__main__"):
client.close()
else:
- bot = TerraBot("127.0.0.1")
+ bot = TerraBot(HOST)
bot.start()
while threading.active_count() > 0:
time.sleep(0.1) | You can now specify an ip by CLI | flammified_terrabot | train | py |
82689e758d15aad49c7ca6f9c64eed5ea82760b7 | diff --git a/fluent_contents/models/fields.py b/fluent_contents/models/fields.py
index <HASH>..<HASH> 100644
--- a/fluent_contents/models/fields.py
+++ b/fluent_contents/models/fields.py
@@ -224,6 +224,7 @@ else:
# Taking the same fix as applied in http://south.aeracode.org/ticket/414
_name_re = "^" + __name__.replace(".", "\.")
add_ignored_fields((
+ _name_re + "\.PlaceholderField",
_name_re + "\.PlaceholderRelation",
_name_re + "\.ContentItemRelation",
)) | Add PlaceholderField to the south ignore list.
Fixes south warnings while creating migrations in external apps. | django-fluent_django-fluent-contents | train | py |
8307c09b92aa4dbf7e375b2fdea03fa2629068fa | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
"""
BigchainDB: A Scalable Blockchain Database
-For full docs visit https://bigchaindb.readthedocs.org
+For full docs visit https://docs.bigchaindb.com
"""
from setuptools import setup, find_packages | Updated docs URL in setup.py | bigchaindb_bigchaindb | train | py |
482139fd3583ba0c77a53298d8e5bf3f9722462d | diff --git a/src/BulkUploader/BulkUploadHandler.php b/src/BulkUploader/BulkUploadHandler.php
index <HASH>..<HASH> 100644
--- a/src/BulkUploader/BulkUploadHandler.php
+++ b/src/BulkUploader/BulkUploadHandler.php
@@ -93,7 +93,8 @@ class BulkUploadHandler extends RequestHandler
$fileRelationName = $this->component->getFileRelationName($this->gridField);
$record->{"{$fileRelationName}ID"} = $fileID;
-
+ $record->write(); //HasManyList call write on record but not ManyManyList, so we call it here again
+
$this->gridField->list->add($record);
if ($this->component->getAutoPublishDataObject() && $record->hasExtension('Versioned')) | FIX #<I> force write image ID
ManyManyList::add did not call write on dataObject, so the image was never attached | colymba_GridFieldBulkEditingTools | train | php |
4107f00d9e3cf76a90e756dc6d45d912731e6514 | diff --git a/src/Place/LocalPlaceService.php b/src/Place/LocalPlaceService.php
index <HASH>..<HASH> 100644
--- a/src/Place/LocalPlaceService.php
+++ b/src/Place/LocalPlaceService.php
@@ -20,7 +20,7 @@ class LocalPlaceService extends LocalEntityService implements PlaceServiceInterf
RepositoryInterface $entityRepository,
RelationsRepository $placeRelationsRepository,
IriGeneratorInterface $iriGenerator
- ){
+ ) {
parent::__construct($documentRepository, $entityRepository, $iriGenerator);
$this->placeRelationsRepository = $placeRelationsRepository;
@@ -35,4 +35,4 @@ class LocalPlaceService extends LocalEntityService implements PlaceServiceInterf
$organizerId
);
}
-}
\ No newline at end of file
+}
diff --git a/src/Place/PlaceServiceInterface.php b/src/Place/PlaceServiceInterface.php
index <HASH>..<HASH> 100644
--- a/src/Place/PlaceServiceInterface.php
+++ b/src/Place/PlaceServiceInterface.php
@@ -9,4 +9,4 @@ interface PlaceServiceInterface
* @return string[]
*/
public function placesOrganizedByOrganizer($organizerId);
-}
\ No newline at end of file
+} | III-<I> Fixed coding standards. | cultuurnet_udb3-php | train | php,php |
11171458ef2ee49fd1d7a08e3e01defd8c917cf6 | diff --git a/examples/commandline/sonoshell.py b/examples/commandline/sonoshell.py
index <HASH>..<HASH> 100644
--- a/examples/commandline/sonoshell.py
+++ b/examples/commandline/sonoshell.py
@@ -47,10 +47,19 @@ if __name__ == '__main__':
if (len(sys.argv) > 3):
operator = sys.argv[3].lower()
volume = sonos.volume()
- if (operator == '+'):
- print sonos.volume(volume + 1)
- elif (operator == '-'):
- print sonos.volume(volume - 1)
+ x = 1
+ if len(operator) > 1:
+ x = int(''.join(n for n in operator if n.isdigit()))
+ if (operator.find('+') != -1):
+ if (volume + x) > 100:
+ x = 1
+ print sonos.volume(volume + x)
+ print sonos.volume()
+ elif (operator.find('-') != -1):
+ if (volume -x) < 0:
+ x = 1
+ print sonos.volume(volume - x)
+ print sonos.volume()
else:
print "Valid operators for volume are + and -"
else: | Added possibility for specifying jump factor.
- Defaulting it the factor to 1 when volume + factor <0 or ><I> | amelchio_pysonos | train | py |
7e1c66d837d8d2e27645d443f067897a14c3a1ca | diff --git a/src/Psalm/Internal/Algebra.php b/src/Psalm/Internal/Algebra.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Internal/Algebra.php
+++ b/src/Psalm/Internal/Algebra.php
@@ -90,6 +90,12 @@ class Algebra
{
$clause_count = count($clauses);
+ //65536 seems to be a significant threshold, when put at 65537, the code https://psalm.dev/r/216f362ea6 goes
+ //from seconds in analysis to many minutes
+ if ($clause_count > 65536) {
+ return [];
+ }
+
if ($clause_count > 50) {
$all_has_unknown = true; | add limit to the number of clauses simplifyCNF can handle | vimeo_psalm | train | php |
da9fca296f5ac400e887ad1413f814d9004b635f | diff --git a/log.go b/log.go
index <HASH>..<HASH> 100644
--- a/log.go
+++ b/log.go
@@ -82,3 +82,13 @@ func WithStacktrace(l *ZapEventLogger, level LogLevel) *ZapEventLogger {
copyLogger.skipLogger = *copyLogger.SugaredLogger.Desugar().WithOptions(zap.AddCallerSkip(1)).Sugar()
return ©Logger
}
+
+// WithSkip returns a new logger that skips the specified number of stack frames when reporting the
+// line/file.
+func WithSkip(l *ZapEventLogger, skip int) *ZapEventLogger {
+ copyLogger := *l
+ copyLogger.SugaredLogger = *copyLogger.SugaredLogger.Desugar().
+ WithOptions(zap.AddCallerSkip(skip)).Sugar()
+ copyLogger.skipLogger = *copyLogger.SugaredLogger.Desugar().WithOptions(zap.AddCallerSkip(1)).Sugar()
+ return ©Logger
+} | feat: add logger option to skip a number of stack frames
This is useful, e.g., when the logger will always be called from some
wrapper. | ipfs_go-log | train | go |
fd648d55a7eeb26f4d5b55bf6545340f36356365 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@ def read(fname):
setup(
name='visitor',
- version='0.1.3.dev1',
+ version='0.1.4.dev1',
description='A tiny pythonic visitor implementation.',
long_description=read('README.rst'),
author='Marc Brinkmann', | Start developing version <I>.dev1 (after release of <I>) | mbr_visitor | train | py |
ec55be0e96af6c07ddecb38529eeab9f72498348 | diff --git a/src/Support/ServiceProvider.php b/src/Support/ServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Support/ServiceProvider.php
+++ b/src/Support/ServiceProvider.php
@@ -26,9 +26,9 @@ class ServiceProvider extends BaseServiceProvider
private function registerGuzzleSubscriber()
{
// Register a log subscriber with every Guzzle client.
- $this->app->bind('GuzzleHttp\Client', function () {
+ $this->app->bind('GuzzleHttp\Client', function ($app, $params) {
// Create new client.
- $client = new Client;
+ $client = new Client(array_shift($params) ?: []);
/** @var DebugBar $debugBar */
$debugBar = $this->app->make('debugbar'); | Allow to do app->make with parameters to pass to the constructor of guzzle client | hannesvdvreken_guzzle-debugbar | train | php |
6ff710ca7a15ec7ca7508550532dbee0baf39bd2 | diff --git a/featuretests/processes_test.go b/featuretests/processes_test.go
index <HASH>..<HASH> 100644
--- a/featuretests/processes_test.go
+++ b/featuretests/processes_test.go
@@ -52,6 +52,8 @@ func initProcessesSuites() {
var (
repoDir = testcharms.Repo.Path()
userInfo *user.User
+ // Set this to true to prevent the test env from being cleaned up.
+ procsDoNotCleanUp = false
)
type processesSuite struct {
@@ -541,9 +543,6 @@ func (env *procsEnviron) addService(c *gc.C, charmName, serviceName string) *pro
return svc
}
-// Set the to true to prevent the test local env from being cleaned up.
-var procsDoNotCleanUp = false
-
func (env *procsEnviron) destroy(c *gc.C) {
if procsDoNotCleanUp {
return | Move a global up with the other globals. | juju_juju | train | go |
615e1c9a139598a169b282ba28ed5fa848a8b3ca | diff --git a/lib/mandrill/web_hook/processor.rb b/lib/mandrill/web_hook/processor.rb
index <HASH>..<HASH> 100644
--- a/lib/mandrill/web_hook/processor.rb
+++ b/lib/mandrill/web_hook/processor.rb
@@ -40,7 +40,7 @@ class Mandrill::WebHook::Processor
def authentic?(request)
result = true
if callback_host && (keys = callback_host.class.mandrill_webhook_keys).present?
- expected_signature = callback_host.request.headers['HTTP_X_MANDRILL_SIGNATURE']
+ expected_signature = request.headers['HTTP_X_MANDRILL_SIGNATURE']
keys.each do |key|
signature = generate_signature(key, request.original_url, request.params)
result = (signature == expected_signature) | Slight refactor to consistently use the request passed as a parameter instead of pulling the request off the callback_host in one place. | evendis_mandrill-rails | train | rb |
5cbafba457ad7628332ed63e9fd9d7b3a9d75697 | diff --git a/pkg/kubelet/cm/devicemanager/manager.go b/pkg/kubelet/cm/devicemanager/manager.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/cm/devicemanager/manager.go
+++ b/pkg/kubelet/cm/devicemanager/manager.go
@@ -231,7 +231,7 @@ func (m *ManagerImpl) Start(activePods ActivePodsFunc, sourcesReady config.Sourc
}
socketPath := filepath.Join(m.socketdir, m.socketname)
- os.MkdirAll(m.socketdir, 0755)
+ os.MkdirAll(m.socketdir, 0750)
if selinux.SELinuxEnabled() {
if err := selinux.SetFileLabel(m.socketdir, config.KubeletPluginsDirSELinuxLabel); err != nil {
klog.Warningf("Unprivileged containerized plugins might not work. Could not set selinux context on %s: %v", m.socketdir, err) | change directory permissions from <I> to <I> | kubernetes_kubernetes | train | go |
a9eea2187551c52696a6376302b624115fd1534b | diff --git a/lib/core_ext/array.rb b/lib/core_ext/array.rb
index <HASH>..<HASH> 100644
--- a/lib/core_ext/array.rb
+++ b/lib/core_ext/array.rb
@@ -1,5 +1,7 @@
-class Array
- def sum
- inject( nil ) { |sum,x| sum ? sum+x : x }
+unless [].respond_to?(:sum)
+ class Array
+ def sum
+ inject( nil ) { |sum,x| sum ? sum+x : x }
+ end
end
-end
\ No newline at end of file
+end | Only patches the array with a sum method if none is present
e. g. Rails already provides this. In Ruby <I> one should use a refinement. | benschwarz_amnesia | train | rb |
ba2e653fbfb3ba9a765e358395bdbe745964e188 | diff --git a/master/buildbot/schedulers/forcesched.py b/master/buildbot/schedulers/forcesched.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/schedulers/forcesched.py
+++ b/master/buildbot/schedulers/forcesched.py
@@ -20,7 +20,7 @@ import email.utils as email_utils
from buildbot.process.properties import Properties
from buildbot.schedulers import base
-from buildbot.config import error
+from buildbot import config
class ValidationError(ValueError):
pass
diff --git a/master/buildbot/test/unit/test_schedulers_forcesched.py b/master/buildbot/test/unit/test_schedulers_forcesched.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/unit/test_schedulers_forcesched.py
+++ b/master/buildbot/test/unit/test_schedulers_forcesched.py
@@ -444,7 +444,7 @@ class TestForceScheduler(scheduler.SchedulerMixin, unittest.TestCase):
klass=NestedParameter, fields=fields, name='')
def test_bad_reason(self):
- self.assertRaises(ValidationError, ForceScheduler,
+ self.assertRaises(config.ConfigErrors, ForceScheduler,
name='testsched', builderNames=[],
codebases=['bar'], reason="foo") | - Fixed test. And fixed import in forcesched.py. | buildbot_buildbot | train | py,py |
90429be98ad6323af26f867cda61ebba034e1c2a | diff --git a/addr_manager.go b/addr_manager.go
index <HASH>..<HASH> 100644
--- a/addr_manager.go
+++ b/addr_manager.go
@@ -109,7 +109,7 @@ func (mgr *AddrManager) AddAddrs(p ID, addrs []ma.Multiaddr, ttl time.Duration)
continue
}
- addrstr := addr.String()
+ addrstr := string(addr.Bytes())
a, found := amap[addrstr]
if !found || exp.After(a.TTL) {
amap[addrstr] = expiringAddr{Addr: addr, TTL: exp}
@@ -144,7 +144,7 @@ func (mgr *AddrManager) SetAddrs(p ID, addrs []ma.Multiaddr, ttl time.Duration)
continue
}
// re-set all of them for new ttl.
- addrs := addr.String()
+ addrs := string(addr.Bytes())
if ttl > 0 {
amap[addrs] = expiringAddr{Addr: addr, TTL: exp} | don't use string method for key of maps | libp2p_go-libp2p-peerstore | train | go |
76c89c165947526fc3839477d8a9cd2cbbffe91f | diff --git a/nion/swift/model/Profile.py b/nion/swift/model/Profile.py
index <HASH>..<HASH> 100644
--- a/nion/swift/model/Profile.py
+++ b/nion/swift/model/Profile.py
@@ -142,10 +142,11 @@ class ProjectReference(Observable.Observable, Persistence.PersistentObject):
if not self.__has_project_info_been_read:
try:
project_storage_system = self.make_storage(profile_context)
+ if project_storage_system:
+ project_storage_system.load_properties()
except Exception:
project_storage_system = None
if project_storage_system:
- project_storage_system.load_properties()
with contextlib.closing(Project.Project(project_storage_system)) as project:
if self.project_uuid != project.project_uuid:
self.project_uuid = project.project_uuid | Fix #<I>. Allow launch to proceed with corrupt project file. | nion-software_nionswift | train | py |
c97301e51c7a2cfceed484354dc32f6097a50789 | diff --git a/js/ui/colorpicker.js b/js/ui/colorpicker.js
index <HASH>..<HASH> 100644
--- a/js/ui/colorpicker.js
+++ b/js/ui/colorpicker.js
@@ -28,12 +28,13 @@
*/
var igv = (function (igv) {
- var columnCount = 5;
+ var columnCount = 8;
igv.ColorPicker = function ($parent, userPalette) {
var self = this,
palette = userPalette || ["#666666", "#0000cc", "#009900", "#cc0000", "#ffcc00", "#9900cc", "#00ccff", "#ff6600", "#ff6600"],
+ //palette = ["#666666", "#0000cc", "#009900", "#cc0000", "#ffcc00", "#9900cc", "#00ccff", "#ff6600", "#ff6600"],
rowCount = Math.ceil(palette.length / columnCount),
rowIndex; | Trello. Color picker issues. There are now 8 color chips per row. | igvteam_igv.js | train | js |
4ef113ace53ff7be22ff855b71fec81b85d52ffb | diff --git a/djcelery_email/conf.py b/djcelery_email/conf.py
index <HASH>..<HASH> 100644
--- a/djcelery_email/conf.py
+++ b/djcelery_email/conf.py
@@ -1,4 +1,3 @@
-from django.conf import settings
from appconf import AppConf
class DjangoCeleryEmailAppConf(AppConf):
diff --git a/djcelery_email/tasks.py b/djcelery_email/tasks.py
index <HASH>..<HASH> 100644
--- a/djcelery_email/tasks.py
+++ b/djcelery_email/tasks.py
@@ -1,3 +1,4 @@
+from django.conf import settings
from django.core.mail import get_connection, EmailMessage, EmailMultiAlternatives
try:
@@ -5,7 +6,7 @@ try:
except ImportError:
from celery.decorators import task as shared_task
-from djcelery_email.conf import settings
+import djcelery_email.conf # Make sure our AppConf is loaded properly.
# Messages *must* be dicts, not instances of the EmailMessage class
# This is because we expect Celery to use JSON encoding, and we want to prevent | Undo an overzealous cleanup.
Revert "*Actually* use appconf correctly, as opposed to how it was being used."
This reverts commit ecc8f<I>d3c8cef<I>be<I>ee<I>bc1c<I>c<I>f5cf. | pmclanahan_django-celery-email | train | py,py |
fb4b533f2b31daebc7ed57c16228458def3d2af9 | diff --git a/src/android/com/adobe/phonegap/push/GCMIntentService.java b/src/android/com/adobe/phonegap/push/GCMIntentService.java
index <HASH>..<HASH> 100644
--- a/src/android/com/adobe/phonegap/push/GCMIntentService.java
+++ b/src/android/com/adobe/phonegap/push/GCMIntentService.java
@@ -762,6 +762,7 @@ public class GCMIntentService extends GcmListenerService implements PushConstant
try {
URL url = new URL(strURL);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+ connection.setConnectTimeout(15000);
connection.setDoInput(true);
connection.connect();
InputStream input = connection.getInputStream(); | :bug::penguin: Issue #<I>: Notification delay caused by icon bitmap timeout | phonegap_phonegap-plugin-push | train | java |
ea0785a9c501828a9f2e4b60f08ac248cd04fb38 | diff --git a/src/org/dmfs/xmlobjects/builder/reflection/ReflectionObjectBuilder.java b/src/org/dmfs/xmlobjects/builder/reflection/ReflectionObjectBuilder.java
index <HASH>..<HASH> 100644
--- a/src/org/dmfs/xmlobjects/builder/reflection/ReflectionObjectBuilder.java
+++ b/src/org/dmfs/xmlobjects/builder/reflection/ReflectionObjectBuilder.java
@@ -19,6 +19,8 @@ package org.dmfs.xmlobjects.builder.reflection;
import java.io.IOException;
import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URI;
import java.net.URISyntaxException;
@@ -294,6 +296,22 @@ public class ReflectionObjectBuilder<T> extends AbstractObjectBuilder<T>
{
field.set(object, new URI(value));
}
+ else if (Enum.class.isAssignableFrom(fieldType))
+ {
+ try
+ {
+ Method valueOf = fieldType.getMethod("valueOf", String.class);
+ field.set(object, valueOf.invoke(fieldType, value));
+ }
+ catch (NoSuchMethodException e)
+ {
+ // this should not happen, we've checked that fieldType is an enum
+ }
+ catch (InvocationTargetException e)
+ {
+ // this should not happen, we've checked that fieldType is an enum
+ }
+ }
}
catch (NumberFormatException e)
{ | Extend ReflectionObjectBuilder with support for enum fields. | dmfs_xmlobjects | train | java |
c932e475faba58e70b4928bb2cfe1afa519b7a25 | diff --git a/spec/yelp/client/search_spec.rb b/spec/yelp/client/search_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/yelp/client/search_spec.rb
+++ b/spec/yelp/client/search_spec.rb
@@ -1,10 +1,10 @@
require 'yelp'
describe Yelp::Client::Search do
- let(:keys) { Hash[consumer_key: 'abc',
- consumer_secret: 'def',
- token: 'ghi',
- token_secret: 'jkl'] }
+ let(:keys) { Hash[consumer_key: ENV['YELP_CONSUMER_KEY'],
+ consumer_secret: ENV['YELP_CONSUMER_SECRET'],
+ token: ENV['YELP_TOKEN'],
+ token_secret: ENV['YELP_TOKEN_SECRET']] }
let(:location) { 'San Francisco' }
let(:params) { Hash[term: 'restaurants',
category_filter: 'discgolf'] } | Use ENV for api keys in testing | Yelp_yelp-ruby | train | rb |
47f75d24df5bcf2775b92ed4e6cefb0c4cfa6a02 | diff --git a/src/methods/dd.js b/src/methods/dd.js
index <HASH>..<HASH> 100644
--- a/src/methods/dd.js
+++ b/src/methods/dd.js
@@ -1,8 +1,7 @@
'use strict';
module.exports = function dd() {
- // eslint-disable-next-line
- console.log(this.all());
+ this.dump();
if (typeof process !== 'undefined') {
process.exit(1);
diff --git a/test/methods/dd_test.js b/test/methods/dd_test.js
index <HASH>..<HASH> 100644
--- a/test/methods/dd_test.js
+++ b/test/methods/dd_test.js
@@ -12,7 +12,7 @@ module.exports = (it, expect, collect) => {
mockConsole.reset();
mockProcess.reset();
- expect(mockConsole.calls).to.eql([[[1, 2, 3]]]);
+ expect(mockConsole.calls).to.eql([[collect([1, 2, 3])]]);
expect(mockProcess.calls).to.eql([[1]]);
});
}; | dd was producing output inconsistent with Laravel | ecrmnn_collect.js | train | js,js |
a43b17837eb500c45753148471bb7f3aa2924b21 | diff --git a/addon/mixins/keen-tracker.js b/addon/mixins/keen-tracker.js
index <HASH>..<HASH> 100644
--- a/addon/mixins/keen-tracker.js
+++ b/addon/mixins/keen-tracker.js
@@ -10,11 +10,15 @@ export default Ember.Mixin.create({
session: Ember.inject.service(),
// Add this mixin to your route, and the afterModel hook will send pageviews to keen
- afterModel(model) { // Using afterModel hook so node info can be sent to keen
+ afterModel(model, transition) { // Using afterModel hook so node info can be sent to keen
window.contextVars = {};
window.contextVars.currentUser = this.userContextVars();
window.contextVars.node = this.nodeContextVars(model); // model may not be a node, in which case, only id might be extracted
- return this.KeenTracker().getInstance().trackPageView();
+ let transitionData = {
+ page: transition.targetName,
+ queryParams: transition.queryParams
+ };
+ return this.KeenTracker().getInstance().trackPageView(transitionData);
},
actions: {
//keenClick action can be used in template | Pass transition data when tracking page view. | CenterForOpenScience_ember-osf | train | js |
dff06a3a334314ea6a7518251b8c858e78d282ac | diff --git a/spyder/plugins/completion/kite/providers/document.py b/spyder/plugins/completion/kite/providers/document.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/completion/kite/providers/document.py
+++ b/spyder/plugins/completion/kite/providers/document.py
@@ -195,19 +195,22 @@ class DocumentProvider:
signatures = call['signatures']
arg_idx = call['arg_index']
- signature = signatures[0]
parameters = []
names = []
- logger.debug(signature)
- for arg in signature['args']:
- parameters.append({
- 'label': arg['name'],
- 'documentation': ''
- })
- names.append(arg['name'])
-
- func_args = ', '.join(names)
- call_label = '{0}({1})'.format(call_label, func_args)
+
+ logger.debug(signatures)
+ if len(signatures) > 0:
+ signature = signatures[0]
+ logger.debug(signature)
+ for arg in signature['args']:
+ parameters.append({
+ 'label': arg['name'],
+ 'documentation': ''
+ })
+ names.append(arg['name'])
+
+ func_args = ', '.join(names)
+ call_label = '{0}({1})'.format(call_label, func_args)
base_signature = {
'label': call_label, | Kite Completion: Fix signature processing when no signature is retrieved | spyder-ide_spyder | train | py |
4659b0794ce5a46e3247011bb0c9bd1706acf0ab | diff --git a/internal/exec/exec.go b/internal/exec/exec.go
index <HASH>..<HASH> 100644
--- a/internal/exec/exec.go
+++ b/internal/exec/exec.go
@@ -313,6 +313,8 @@ func (r *Request) execList(ctx context.Context, sels []selected.Selection, typ *
entryouts := make([]bytes.Buffer, l)
if selected.HasAsyncSel(sels) {
+ // Limit the number of concurrent goroutines spawned as it can lead to large
+ // memory spikes for large lists.
concurrency := cap(r.Limiter)
sem := make(chan struct{}, concurrency)
for i := 0; i < l; i++ { | Add comment explaining why we limit concurrency | graph-gophers_graphql-go | train | go |
1f7078856c8c925fbf3d22051ebe7f6ee9114048 | diff --git a/login.php b/login.php
index <HASH>..<HASH> 100644
--- a/login.php
+++ b/login.php
@@ -94,9 +94,12 @@ case 'login':
Auth::login($user);
Log::addAuthenticationLog('Login: ' . Auth::user()->getUserName() . '/' . Auth::user()->getRealName());
- $WT_SESSION->timediff = $timediff;
- $WT_SESSION->locale = Auth::user()->getPreference('language');
- $WT_SESSION->theme_dir = Auth::user()->getPreference('theme');
+ $WT_SESSION->timediff = $timediff;
+ $WT_SESSION->locale = Auth::user()->getPreference('language');
+ $WT_SESSION->theme_dir = Auth::user()->getPreference('theme');
+ $WT_SESSION->activity_time = WT_TIMESTAMP;
+
+ Auth::user()->setPreference('sessiontime', WT_TIMESTAMP);
// If we’ve clicked login from the login page, we don’t want to go back there.
if (strpos($url, WT_SCRIPT_NAME) === 0) { | Session time is updated every 5 minutes. Force update on login, otherwise sessions shorter than 5 minutes may not be logged | fisharebest_webtrees | train | php |
ff43f7e66995fffc445e9843ada564a8c48c3cc3 | diff --git a/lib/sham_rack/http.rb b/lib/sham_rack/http.rb
index <HASH>..<HASH> 100644
--- a/lib/sham_rack/http.rb
+++ b/lib/sham_rack/http.rb
@@ -8,6 +8,8 @@ module ShamRack
@port = port
@rack_app = rack_app
end
+
+ attr_reader :address, :port, :rack_app
def start
yield self | Add accessors for address, port and app. | mdub_sham_rack | train | rb |
ecafcc493a1b9cd194c86adca6be42bb90be46fa | diff --git a/server/src/main/java/org/uiautomation/ios/server/simulator/IOSRealDeviceManager.java b/server/src/main/java/org/uiautomation/ios/server/simulator/IOSRealDeviceManager.java
index <HASH>..<HASH> 100644
--- a/server/src/main/java/org/uiautomation/ios/server/simulator/IOSRealDeviceManager.java
+++ b/server/src/main/java/org/uiautomation/ios/server/simulator/IOSRealDeviceManager.java
@@ -59,16 +59,11 @@ public class IOSRealDeviceManager implements IOSDeviceManager {
// TODO upgrade ?
// needs to re-install
- System.out.println("uninstall");
+ log.fine("uninstalling " + bundleId + " for " + service.getDeviceId());
service.uninstall(bundleId);
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
-
- }
- System.out.println("install");
+ log.fine("installing " + bundleId + " for " + service.getDeviceId());
service.install(((IPAApplication) aut).getIPAFile());
- System.out.println("installed");
+ log.fine(bundleId + " for " + service.getDeviceId() + " installed.");
} else {
throw new WebDriverException("only IPA apps can be used on a real device.");
} | adding getter for the service, and logginf the actions in real device manager. | ios-driver_ios-driver | train | java |
e7cc94a4552149e6b56419612df79f3492c9474b | diff --git a/actionpack/lib/action_view/helpers/url_helper.rb b/actionpack/lib/action_view/helpers/url_helper.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_view/helpers/url_helper.rb
+++ b/actionpack/lib/action_view/helpers/url_helper.rb
@@ -285,10 +285,6 @@ module ActionView
# * <tt>:confirm</tt> - This will use the unobtrusive JavaScript driver to
# prompt with the question specified. If the user accepts, the link is
# processed normally, otherwise no action is taken.
- # * <tt>:disable_with</tt> - This will use the unobtrusive JavaScript driver to
- # set the input value to the message specified when the user clicks the button and disable the
- # button while the ajax call is executed. Then the input value is set to the original and the button
- # is enabled
# * <tt>:remote</tt> - If set to true, will allow the Unobtrusive JavaScript drivers to control the
# submit behaviour. By default this behaviour is an ajax submit.
# | Revert commit <I>c<I>c8e<I>c Need more research on disable-with and data-attributes | rails_rails | train | rb |
bd62ef6411604e593861621525c4f9e0140a1cdd | diff --git a/fundingmanager.go b/fundingmanager.go
index <HASH>..<HASH> 100644
--- a/fundingmanager.go
+++ b/fundingmanager.go
@@ -2573,8 +2573,9 @@ func (f *fundingManager) handleInitFundingMsg(msg *initFundingMsg) {
}
fndgLog.Infof("Initiating fundingRequest(localAmt=%v, remoteAmt=%v, "+
- "capacity=%v, chainhash=%v, addr=%v, dustLimit=%v)", localAmt,
- msg.pushAmt, capacity, msg.chainHash, peerKey, ourDustLimit)
+ "capacity=%v, chainhash=%v, peer=%x, dustLimit=%v)", localAmt,
+ msg.pushAmt, capacity, msg.chainHash, peerKey.SerializeCompressed(),
+ ourDustLimit)
// First, we'll query the fee estimator for a fee that should get the
// commitment transaction confirmed by the next few blocks (conf target | funding: properly log pubkey of peer within handleInitFundingMsg
Before this commit, we would log the struct of the pubkey, rather than
the serialized compressed version. | lightningnetwork_lnd | train | go |
ad5a0c841f3633d0c37ef9639362027d76e4c32a | diff --git a/imagemounter/cli/__init__.py b/imagemounter/cli/__init__.py
index <HASH>..<HASH> 100644
--- a/imagemounter/cli/__init__.py
+++ b/imagemounter/cli/__init__.py
@@ -47,6 +47,7 @@ class CheckAction(argparse.Action):
self._check_command("affuse", "afflib-tools", "AFF images (partially covered by xmount)")
self._check_command("vmware-mount", why="VMWare disks")
self._check_command("mountavfs", "avfs", "compressed disk images")
+ self._check_command("qemu-nbd", "qemu-utils", "Qcow2 images")
print("-- Detecting volumes and volume types (at least one required) --")
self._check_command("mmls", "sleuthkit")
self._check_module("pytsk3")
@@ -68,7 +69,6 @@ class CheckAction(argparse.Action):
self._check_command("cryptsetup", "cryptsetup", "LUKS containers")
self._check_command("bdemount", "libbde-utils", "Bitlocker Drive Encryption volumes")
self._check_command("vshadowmount", "libvshadow-utils", "NTFS volume shadow copies")
- self._check_command("qemu-nbd", "qemu-utils", "Qcow2 images")
parser.exit() | Move qemu-nbd to disk image section | ralphje_imagemounter | train | py |
3c31c38667484cb6a25edcb76b7aa391ae14b3a9 | diff --git a/lib/arjdbc/mysql/connection_methods.rb b/lib/arjdbc/mysql/connection_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/arjdbc/mysql/connection_methods.rb
+++ b/lib/arjdbc/mysql/connection_methods.rb
@@ -133,6 +133,7 @@ ArJdbc::ConnectionMethods.module_eval do
#"macroman" => "macRoman",
"cp852" => "CP852",
"latin7" => "ISO-8859-13",
+ "utf8" => "UTF-8",
"utf8mb4" => "UTF-8",
"cp1251" => "Windows-1251",
"utf16" => "UTF-16", | [mysql] let us try re-mapping 'utf8' into 'UTF-8' for JDBC driver(s) | jruby_activerecord-jdbc-adapter | train | rb |
f2d9fd37dd03a03a1e97b22db59835d72d61c3de | diff --git a/polysquare_setuptools_lint/__init__.py b/polysquare_setuptools_lint/__init__.py
index <HASH>..<HASH> 100644
--- a/polysquare_setuptools_lint/__init__.py
+++ b/polysquare_setuptools_lint/__init__.py
@@ -219,7 +219,7 @@ def _run_prospector_on(filenames, tools, ignore_codes=None):
def _file_is_test(filename):
"""Return true if file is a test."""
- is_test = re.compile(r"^.*test[^{0}]*.py$".format(os.path.sep))
+ is_test = re.compile(r"^.*test[^{0}]*.py$".format(re.escape(os.path.sep)))
return bool(is_test.match(filename)) | Escape path separator.
On Windows it is a backslash, which if left unescaped in a regex
can cause all sorts of havoc. | polysquare_polysquare-setuptools-lint | train | py |
676adf62b8d4d5dc83a7cf91eff0eb6d2eb1b9a8 | diff --git a/pkg/resource/plugin/plugin.go b/pkg/resource/plugin/plugin.go
index <HASH>..<HASH> 100644
--- a/pkg/resource/plugin/plugin.go
+++ b/pkg/resource/plugin/plugin.go
@@ -176,7 +176,7 @@ func newPlugin(ctx *Context, bin string, prefix string, args []string) (*plugin,
go runtrace(plug.Stdout, false, stdoutDone)
// Now that we have the port, go ahead and create a gRPC client connection to it.
- conn, err := grpc.Dial(":"+port, grpc.WithInsecure(), grpc.WithUnaryInterceptor(
+ conn, err := grpc.Dial("127.0.0.1:"+port, grpc.WithInsecure(), grpc.WithUnaryInterceptor(
rpcutil.OpenTracingClientInterceptor(),
))
if err != nil { | Use an explicit address when dialing plugins (#<I>)
This is necessary in order for gRPC's proxy support to properly respect
NO_PROXY.
Fixes #<I>. | pulumi_pulumi | train | go |
ca17b08ca1d5fca462d0c55398f3762d5bcefd9a | diff --git a/tests/Test.php b/tests/Test.php
index <HASH>..<HASH> 100644
--- a/tests/Test.php
+++ b/tests/Test.php
@@ -33,7 +33,7 @@ class GroupTest extends UnitTestCase
}
return true;
}
- public function run(HtmlReporter $reporter, $filter)
+ public function run(HtmlReporter $reporter = null, $filter = null)
{
$reporter->paintHeader();
foreach ($this->_testCases as $k => $testCase) {
@@ -160,7 +160,7 @@ class UnitTestCase
}
$this->_failed++;
}
- public function run()
+ public function run(HtmlReporter $reporter = null, $filter = null)
{
foreach (get_class_methods($this) as $method) {
if (substr($method, 0, 4) === 'test') { | Strict standards: GroupTest::run and UnitTest::run should have the same signature | doctrine_annotations | train | php |
cc2d02d7de8c43131dfb1ca141ff8be1a5f88acf | diff --git a/app/setup.php b/app/setup.php
index <HASH>..<HASH> 100755
--- a/app/setup.php
+++ b/app/setup.php
@@ -92,6 +92,12 @@ add_action('after_setup_theme', function () {
add_theme_support('custom-units', 'rem', 'vw');
/**
+ * Enable support for custom block spacing controls.
+ * @link https://developer.wordpress.org/block-editor/developers/themes/theme-support/#spacing-control
+ */
+ add_theme_support('custom-spacing');
+
+ /**
* Disable custom colors in the editor.
* @link https://developer.wordpress.org/block-editor/developers/themes/theme-support/#disabling-custom-colors-in-block-color-palettes
*/
@@ -116,12 +122,6 @@ add_action('after_setup_theme', function () {
remove_theme_support('core-block-patterns');
/**
- * Enable support for custom block spacing controls.
- * @link https://developer.wordpress.org/block-editor/developers/themes/theme-support/#spacing-control
- */
- add_theme_support('custom-spacing');
-
- /**
* Enable plugins to manage the document title.
* @link https://developer.wordpress.org/reference/functions/add_theme_support/#title-tag
*/ | chore(theme): Move `custom-spacing` up for visibility | roots_sage | train | php |
d3cddc53ada0745f8f972cead0ca19559168d41d | diff --git a/app/server.js b/app/server.js
index <HASH>..<HASH> 100644
--- a/app/server.js
+++ b/app/server.js
@@ -23,7 +23,6 @@ exports = module.exports = function(container, settings, logger) {
}
exports['@implements'] = 'http://i.bixbyjs.org/http/Server';
-exports['@singleton'] = true;
exports['@require'] = [
'!container',
'http://i.bixbyjs.org/Settings', | Remove singleton from HTTP server. | bixbyjs_bixby-http | train | js |
1e0284d2d4f981b63e76c4c27c62f243e710031d | diff --git a/cub/models.py b/cub/models.py
index <HASH>..<HASH> 100644
--- a/cub/models.py
+++ b/cub/models.py
@@ -256,9 +256,10 @@ class SKU(CubObject):
pass
-class Order(CreatableObject, ListableObject):
+class Order(CreatableObject, UpdatableObject, RemovableObject, ListableObject):
pass
-class OrderItem(CreatableObject, ListableObject):
+class OrderItem(CreatableObject, UpdatableObject, RemovableObject,
+ ListableObject):
pass | make Order and OrderItem models also updatable and removable | praetoriandigital_cub-python | train | py |
9fd5c5197e7a06068b2fc261ae9b51980475a5f9 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -43,6 +43,7 @@ function replace(text, options) {
var includeRegExp = new RegExp(escapeRegExp(options.prefix) + "(.+?)" + escapeRegExp(options.suffix), "g");
+ var retVal = text;
var regExpResult;
while (regExpResult = includeRegExp.exec(text)) {
var fullMatch = regExpResult[0];
@@ -52,10 +53,10 @@ function replace(text, options) {
tokenValue = '';
}
if (tokenValue !== null) {
- text = text.replace(fullMatch, tokenValue);
+ retVal = retVal.replace(fullMatch, tokenValue);
}
}
- return text;
+ return retVal;
}
function escapeRegExp(text) { | fix a bug where tokens were not always being found (identified with the new test cases) | Pictela_gulp-token-replace | train | js |
c47eac168329f7cfee4ad98ba5051f26fac08e52 | diff --git a/lib/liquid/strainer.rb b/lib/liquid/strainer.rb
index <HASH>..<HASH> 100644
--- a/lib/liquid/strainer.rb
+++ b/lib/liquid/strainer.rb
@@ -14,7 +14,7 @@ module Liquid
# One of the strainer's responsibilities is to keep malicious method calls out
class Strainer < parent_object #:nodoc:
INTERNAL_METHOD = /^__/
- @@required_methods = Set.new([:__id__, :__send__, :respond_to?, :extend, :methods, :class, :object_id])
+ @@required_methods = Set.new([:__id__, :__send__, :respond_to?, :kind_of?, :extend, :methods, :singleton_methods, :class, :object_id])
# Ruby 1.9.2 introduces Object#respond_to_missing?, which is invoked by Object#respond_to?
@@required_methods << :respond_to_missing? if Object.respond_to? :respond_to_missing? | Add kind_of? and singleton_methods to Strainer so that Rubinius works | Shopify_liquid | train | rb |
054ba370f648b004eedcb1f4038b921691b7c8d2 | diff --git a/tests/TestCase.php b/tests/TestCase.php
index <HASH>..<HASH> 100644
--- a/tests/TestCase.php
+++ b/tests/TestCase.php
@@ -70,4 +70,20 @@ class TestCase extends \PHPUnit_Framework_TestCase
return $this->workDir;
}
+
+ /**
+ * Retrieve the path of a temp file within the temp dir of the test.
+ *
+ * @param string $name Optional name of the file.
+ *
+ * @return string
+ */
+ public function getTempFile($name = '')
+ {
+ if ('' === $name) {
+ $name = uniqid();
+ }
+
+ return $this->getTempDir() . DIRECTORY_SEPARATOR . $name;
+ }
} | Add new test method to obtain path to a temp file | tenside_core | train | php |
098f5c5b6f4d35ccd503af2907c30357aa02eb25 | diff --git a/spec/mongoid/association/constrainable_spec.rb b/spec/mongoid/association/constrainable_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mongoid/association/constrainable_spec.rb
+++ b/spec/mongoid/association/constrainable_spec.rb
@@ -77,12 +77,26 @@ describe Mongoid::Association::Constrainable do
context 'when a string is passed' do
- let(:object) do
- BSON::ObjectId.new.to_s
+ context 'when the string represents an ObjectId' do
+
+ let(:object) do
+ BSON::ObjectId.new.to_s
+ end
+
+ it 'returns the object id' do
+ expect(result).to eq(BSON::ObjectId.from_string(object))
+ end
end
- it 'returns the object id' do
- expect(result).to eq(BSON::ObjectId.from_string(object))
+ context 'when the string does not represent an ObjectId' do
+
+ let(:object) do
+ 'some-other-string'
+ end
+
+ it 'returns the object' do
+ expect(result).to eq(object)
+ end
end
end | MONGOID-<I> Add test to ensure that only Strings representing ObjectIds are converted to ObjectIds | mongodb_mongoid | train | rb |
27b7ecebf58d3bcb8d3be88dfadd3cbe2530395c | diff --git a/cli/drivers/BedrockValetDriver.php b/cli/drivers/BedrockValetDriver.php
index <HASH>..<HASH> 100644
--- a/cli/drivers/BedrockValetDriver.php
+++ b/cli/drivers/BedrockValetDriver.php
@@ -48,6 +48,7 @@ class BedrockValetDriver extends BasicValetDriver
public function frontControllerPath($sitePath, $siteName, $uri)
{
$_SERVER['PHP_SELF'] = $uri;
+ $_SERVER['SERVER_NAME'] = $_SERVER['HTTP_HOST'];
if (strpos($uri, '/wp/') === 0) {
return is_dir($sitePath.'/web'.$uri) | Set SERVER_NAME variable for WordPress (Bedrock) driver
Fix for Wordpress (Bedrock) as in #<I> | laravel_valet | train | php |
2f1c63e548c58c799cfd07d4cdf7e6befe1a9a69 | diff --git a/drivers/shared/executor/executor_test.go b/drivers/shared/executor/executor_test.go
index <HASH>..<HASH> 100644
--- a/drivers/shared/executor/executor_test.go
+++ b/drivers/shared/executor/executor_test.go
@@ -157,8 +157,8 @@ func TestExecutor_Start_Wait_Failure_Code(pt *testing.T) {
require := require.New(t)
testExecCmd := testExecutorCommand(t)
execCmd, allocDir := testExecCmd.command, testExecCmd.allocDir
- execCmd.Cmd = "/bin/date"
- execCmd.Args = []string{"fail"}
+ execCmd.Cmd = "/bin/sh"
+ execCmd.Args = []string{"-c", "sleep 1; /bin/date fail"}
factory.configureExecCmd(t, execCmd)
defer allocDir.Destroy()
executor := factory.new(testlog.HCLogger(t)) | tests: ensure that test is long enough to configure cgroups | hashicorp_nomad | train | go |
95a6167e8f8709b470b1b54667a162eb6e0dfa2a | diff --git a/great_expectations/data_asset/data_asset.py b/great_expectations/data_asset/data_asset.py
index <HASH>..<HASH> 100644
--- a/great_expectations/data_asset/data_asset.py
+++ b/great_expectations/data_asset/data_asset.py
@@ -1155,7 +1155,7 @@ class DataAsset:
Counter(unexpected_list).most_common(
result_format["partial_unexpected_count"]
),
- key=lambda x: (-x[1], x[0]),
+ key=lambda x: (-x[1], str(x[0])),
)
]
except TypeError: | Updated sort so that secondary sort is by string to avoid TypeError when there is more than one unexpected type. (#<I>) | great-expectations_great_expectations | train | py |
0e2eeb7e3315e51881b5bd941165dbad1ca0a533 | diff --git a/deep.js b/deep.js
index <HASH>..<HASH> 100644
--- a/deep.js
+++ b/deep.js
@@ -212,6 +212,9 @@ define([
deep.client = {};
require("./lib/stores/chain");
+ deep.delay = function(ms){
+ return deep({}).delay(ms);
+ }
//_________________________________________________________________________________ | add deep.delay (front API) | deepjs_deepjs | train | js |
19df05f2da13bdc6957ef1adf0660aa9f61c89f7 | diff --git a/shinken/scheduler.py b/shinken/scheduler.py
index <HASH>..<HASH> 100644
--- a/shinken/scheduler.py
+++ b/shinken/scheduler.py
@@ -1723,6 +1723,7 @@ class Scheduler(object):
self.dump_config()
self.need_objects_dump = False
+ self.hook_point('scheduler_tick')
# WE must save the retention at the quit BY OURSELF | Add: a scheduler tick hook
So that shinken modules can use it to make some work on every scheduler tick. | Alignak-monitoring_alignak | train | py |
4d857700c5acd2b7e707ac7bd6de26cee56ccb54 | diff --git a/lib/json-content-demux.js b/lib/json-content-demux.js
index <HASH>..<HASH> 100644
--- a/lib/json-content-demux.js
+++ b/lib/json-content-demux.js
@@ -10,7 +10,8 @@
function jsonContentDemux(muxStr, options) {
// Find where the JSON ends
var delimiter = /\n\r?\n\r?/g,
- delimiterIndex = muxStr.search(delimiter);
+ result = delimiter.exec(muxStr),
+ delimiterIndex = delimiter.lastIndex;
// Fallback the dblLineBreakIndex
if (delimiterIndex === -1) {
@@ -20,7 +21,7 @@ function jsonContentDemux(muxStr, options) {
// Break up the json and content
var jsonStr = muxStr.slice(0, delimiterIndex) || '{}',
json = new Function('return ' + jsonStr + ';')(),
- content = muxStr.slice(muxStr);
+ content = muxStr.slice(delimiterIndex);
// Prepare a retObj and return
var retObj = { | Adjusted to use delimiter lastIndex (which accounts for space taken by match | twolfson_json-content-demux | train | js |
8c40c1d6b6af5a5f72d4cd3d1aae7ae54052fa98 | diff --git a/lib/oxidized/model/fortios.rb b/lib/oxidized/model/fortios.rb
index <HASH>..<HASH> 100644
--- a/lib/oxidized/model/fortios.rb
+++ b/lib/oxidized/model/fortios.rb
@@ -2,7 +2,7 @@ class FortiOS < Oxidized::Model
comment '# '
- prompt /^([-\w\.]+(\s[\(\w\-\.\)]+)?\~?\s?[#>]\s?)$/
+ prompt /^([-\w\.]+(\s[\(\w\-\.\)]+)?\~?\s?[#>$]\s?)$/
expect /^--More--\s$/ do |data, re|
send ' ' | Fixed the fortios prompt to support a read-only user | ytti_oxidized | train | rb |
f88c3df18d0f066a91af55ff790e25f6e91e739f | diff --git a/src/UrlHandlers/ClassMappedUrlHandler.php b/src/UrlHandlers/ClassMappedUrlHandler.php
index <HASH>..<HASH> 100644
--- a/src/UrlHandlers/ClassMappedUrlHandler.php
+++ b/src/UrlHandlers/ClassMappedUrlHandler.php
@@ -22,6 +22,10 @@ class ClassMappedUrlHandler extends UrlHandler
{
private $className = "";
+ /**
+ * @param string $className
+ * @param array $children
+ */
public function __construct($className, $children = [])
{
parent::__construct($children); | PhpStorm infers type of $className as an array from the parent constructor if we don't specifically mark that it isn't, which causes inspector warnings when the handlers are constructed with strings for the first param | RhubarbPHP_Rhubarb | train | php |
4771aa259fb412cc65832d930a329b127a4a8696 | diff --git a/angr/analyses/ddg.py b/angr/analyses/ddg.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/ddg.py
+++ b/angr/analyses/ddg.py
@@ -1086,9 +1086,9 @@ class DDG(Analysis):
# Group all dependencies first
simrun_addr_to_func = { }
- for _, func in self._cfg.function_manager.functions.iteritems():
+ for _, func in self.kb.functions.iteritems():
for block in func.blocks:
- simrun_addr_to_func[block] = func
+ simrun_addr_to_func[block.addr] = func
for src, dst, data in self.graph.edges_iter(data=True):
src_target_func = None | Fixup of knowledge_base API changes, in '_build_function_dependency_graphs' | angr_angr | train | py |
6b8c051efbb6cf8a02e4641d747dce899e0de73d | diff --git a/src/kba/pipeline/_language.py b/src/kba/pipeline/_language.py
index <HASH>..<HASH> 100644
--- a/src/kba/pipeline/_language.py
+++ b/src/kba/pipeline/_language.py
@@ -18,8 +18,10 @@ def language(config):
def _language(si, context):
if si.body and si.body.raw:
name, code, is_reliable, num_text_bytes, details = cld.detect(si.body.raw)
- if is_reliable:
+ if is_reliable and code != 'xxx':
si.body.language = Language(code=code, name=name)
+ else:
+ si.body.language = Language(code='', name='')
return si
return _language | making "language" stage always create si.body.language even if empty | trec-kba_streamcorpus-pipeline | train | py |
0bfdd1ff7150df62c2927334bba4f2d73e1318fa | diff --git a/packages/cli/lib/lib/webpack/push-manifest.js b/packages/cli/lib/lib/webpack/push-manifest.js
index <HASH>..<HASH> 100644
--- a/packages/cli/lib/lib/webpack/push-manifest.js
+++ b/packages/cli/lib/lib/webpack/push-manifest.js
@@ -7,7 +7,7 @@ module.exports = class PushManifestPlugin {
if (!/\.map$/.test(filename)) {
if (/route-/.test(filename)) {
routes.push(filename);
- } else if (/^style(.+)\.css$/.test(filename)) {
+ } else if (/^bundle(.+)\.css$/.test(filename)) {
mainCss = filename;
} else if (/^bundle(.+)\.js$/.test(filename)) {
mainJs = filename; | fix main CSS filename in `push-manifest` | developit_preact-cli | train | js |
e19183fe521b0711075c293dc004189b4018161c | diff --git a/src/PhpCollection/AbstractMap.php b/src/PhpCollection/AbstractMap.php
index <HASH>..<HASH> 100644
--- a/src/PhpCollection/AbstractMap.php
+++ b/src/PhpCollection/AbstractMap.php
@@ -168,6 +168,10 @@ class AbstractMap extends AbstractCollection implements \IteratorAggregate, MapI
return $this->filterInternal($callable, false);
}
+ /**
+ * @param callable $callable
+ * @param boolean $booleanKeep
+ */
private function filterInternal($callable, $booleanKeep)
{
$newElements = array(); | Scrutinizer Auto-Fixes
This commit consists of patches automatically generated for this project on <URL> | schmittjoh_php-collection | train | php |
32e9661f7c35504527358f876c6ce59c635589ae | diff --git a/azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/implementation/NetworkImpl.java b/azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/implementation/NetworkImpl.java
index <HASH>..<HASH> 100644
--- a/azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/implementation/NetworkImpl.java
+++ b/azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/implementation/NetworkImpl.java
@@ -142,7 +142,13 @@ class NetworkImpl
@Override
public List<String> addressSpaces() {
- return Collections.unmodifiableList(this.inner().addressSpace().addressPrefixes());
+ if (this.inner().addressSpace() == null) {
+ return null;
+ } else if(this.inner().addressSpace().addressPrefixes() == null) {
+ return null;
+ } else {
+ return Collections.unmodifiableList(this.inner().addressSpace().addressPrefixes());
+ }
}
@Override | fix for potential NPE in NetworkImpl#addressSpaces() | Azure_azure-sdk-for-java | train | java |
ac17355524697d4c16cb81916bac644eb1adc093 | diff --git a/blocks/course_list/block_course_list.php b/blocks/course_list/block_course_list.php
index <HASH>..<HASH> 100644
--- a/blocks/course_list/block_course_list.php
+++ b/blocks/course_list/block_course_list.php
@@ -68,7 +68,6 @@ class block_course_list extends block_list {
$this->content->items[]="<a $linkcss href=\"$CFG->wwwroot/course/category.php?id=$category->id\">" . format_string($category->name) . "</a>";
$this->content->icons[]=$icon;
}
- $this->content->footer .= "<a href=\"$CFG->wwwroot/course/index.php\">".get_string('searchcourses').'</a> ...<br />';
/// If we can update any course of the view all isn't hidden, show the view all courses link
if (has_capability('moodle/course:update', get_context_instance(CONTEXT_SYSTEM)) || empty($CFG->block_course_list_hideallcourseslink)) {
$this->content->footer .= "<a href=\"$CFG->wwwroot/course/index.php\">".get_string('fulllistofcourses').'</a> ...'; | MDL-<I> - When we have more than one category/lots of courses the course
block shows two links next to search other (Search Courses/All courses)
which point to the same place....
The all courses link is able to be switched off and the search courses is not,
for this reason i've gone for removing the search courses link.
Merged from MOODLE_<I>_STABLE | moodle_moodle | train | php |
faab79429333de89ba709241eda26bff665b3622 | diff --git a/gwpy/timeseries/tests/test_io_gwf_lalframe.py b/gwpy/timeseries/tests/test_io_gwf_lalframe.py
index <HASH>..<HASH> 100644
--- a/gwpy/timeseries/tests/test_io_gwf_lalframe.py
+++ b/gwpy/timeseries/tests/test_io_gwf_lalframe.py
@@ -61,9 +61,7 @@ def _test_open_data_source(source):
"""
stream = gwpy_lalframe.open_data_source(source)
assert stream.epoch == TEST_GWF_SEGMENT[0]
- assert TEST_GWF_PATH == Path(
- urlparse(stream.cache.list.url).path
- ).absolute()
+ assert Path(urlparse(stream.cache.list.url).path).samefile(TEST_GWF_PATH)
@pytest.mark.parametrize("source", [ | gwpy.timeseries: use samefile to compare paths
which unwraps symlinks properly, closes #<I> | gwpy_gwpy | train | py |
ef2d14fe17b2549670895b2d25207f17b8362534 | diff --git a/bcbio/pipeline/qcsummary.py b/bcbio/pipeline/qcsummary.py
index <HASH>..<HASH> 100644
--- a/bcbio/pipeline/qcsummary.py
+++ b/bcbio/pipeline/qcsummary.py
@@ -321,6 +321,8 @@ def _run_coverage_qc(bam_file, data, out_dir):
if "Duplicates" in samtools_stats:
out['Duplicates'] = dups = int(samtools_stats["Duplicates"])
out['Duplicates_pct'] = 100.0 * dups / int(samtools_stats["Mapped_reads_raw"])
+ else:
+ dups = 0
if dd.get_coverage(data):
cov_bed_file = clean_file(dd.get_coverage(data), data, prefix="cov-", simple=True)
@@ -333,9 +335,9 @@ def _run_coverage_qc(bam_file, data, out_dir):
merged_bed_file = None
target_name = "genome"
- # Whole genome runs do not need detailed on-target calculations, use total mapped
+ # Whole genome runs do not need detailed on-target calculations, use total unique mapped
if dd.get_coverage_interval(data) == "genome":
- mapped_unique = mapped
+ mapped_unique = mapped - dups
else:
out['Mapped_unique_reads'] = mapped_unique = sambamba.number_of_mapped_reads(data, bam_file, keep_dups=False) | QC: remove duplicates for WGS unique mapped | bcbio_bcbio-nextgen | train | py |
e81cebd74bc518051434cbdd5bb2874f211b45fc | diff --git a/js/core/Base.js b/js/core/Base.js
index <HASH>..<HASH> 100644
--- a/js/core/Base.js
+++ b/js/core/Base.js
@@ -147,6 +147,8 @@ define(["inherit"], function(inherit){
self.log(e, Base.LOGLEVEL.ERROR);
}
}
+
+ obj.callbacks = [];
});
} | free memory after synchronizeFunctionCall returned | rappid_rAppid.js | train | js |
b41e878a033369b4111ba5c71a1ed7e9872593e1 | diff --git a/AssetManager/NpmManager.php b/AssetManager/NpmManager.php
index <HASH>..<HASH> 100644
--- a/AssetManager/NpmManager.php
+++ b/AssetManager/NpmManager.php
@@ -39,7 +39,7 @@ class NpmManager extends AbstractAssetManager
*/
public function getVersionCommand()
{
- return 'npm --version';
+ return $this->buildCommand('npm', 'version', '--version');
}
/**
diff --git a/AssetManager/YarnManager.php b/AssetManager/YarnManager.php
index <HASH>..<HASH> 100644
--- a/AssetManager/YarnManager.php
+++ b/AssetManager/YarnManager.php
@@ -39,7 +39,7 @@ class YarnManager extends AbstractAssetManager
*/
public function getVersionCommand()
{
- return 'yarn --version';
+ return $this->buildCommand('yarn', 'version', '--version');
}
/** | Use the custom binary of asset manager to retrieve the version | fxpio_foxy | train | php,php |
0e0e999a4062dae23692984f32f10f49780f30db | diff --git a/src/main/java/guru/nidi/graphviz/engine/Rasterizer.java b/src/main/java/guru/nidi/graphviz/engine/Rasterizer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/guru/nidi/graphviz/engine/Rasterizer.java
+++ b/src/main/java/guru/nidi/graphviz/engine/Rasterizer.java
@@ -22,7 +22,6 @@ import java.util.function.Consumer;
public interface Rasterizer {
Rasterizer BATIK = new BatikRasterizer();
Rasterizer SALAMANDER = new SalamanderRasterizer();
- Rasterizer XDOT = new XdotRasterizer();
Format format(); | moved xdot rasterizer to branch | nidi3_graphviz-java | train | java |
e6af8bfc38f8fbe92afdd859b7e1d9702f1c928e | diff --git a/cassandra/cluster.py b/cassandra/cluster.py
index <HASH>..<HASH> 100644
--- a/cassandra/cluster.py
+++ b/cassandra/cluster.py
@@ -643,6 +643,8 @@ class Cluster(object):
raise Exception("Cluster is already shut down")
if not self._is_setup:
+ log.debug("Connecting to cluster, contact points: %s; protocol version: %s",
+ self.contact_points, self.protocol_version)
self.connection_class.initialize_reactor()
atexit.register(partial(_shutdown_cluster, self))
for address in self.contact_points: | Log contact points and protocol version in Cluster.connect() | datastax_python-driver | train | py |
c88b81732c39656aa41ca3fa9b5f01eec18231da | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -107,7 +107,7 @@ Filter.prototype.build = function() {
var entries = walkSync.entries(srcDir);
var walkDuration = timeSince(walkStart);
- var nextTree = new FSTree.fromEntries(entries);
+ var nextTree = FSTree.fromEntries(entries);
var currentTree = this.currentTree;
this.currentTree = nextTree; | FSTree.fromEntries should not be new’d | stefanpenner_broccoli-persistent-filter | train | js |
602a8cdefb1fc6cc2150d6a0475f704be91fbac7 | diff --git a/spec/lib/daemons/application_spec.rb b/spec/lib/daemons/application_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/daemons/application_spec.rb
+++ b/spec/lib/daemons/application_spec.rb
@@ -2,9 +2,10 @@ require 'spec_helper'
module Daemons
describe Application do
- subject(:application) { described_class.new group }
+ subject(:application) { described_class.new group, additional_options }
- let(:group) { ApplicationGroup.new 'my_app' }
+ let(:app_name) { 'my_app' }
+ let(:group) { ApplicationGroup.new app_name }
let(:options) { Hash.new }
let(:log_dir) { nil }
let(:dir_mode) { nil }
@@ -17,6 +18,9 @@ module Daemons
group: group
}
}
+ let(:additional_options) {
+ {}
+ }
before do
allow(group)
@@ -130,5 +134,25 @@ module Daemons
end
end
end
+
+ describe '#output_logfilename' do
+ subject { application.output_logfilename }
+
+ context 'when an output_logfilename is specified' do
+ let(:output_logfilename) {
+ 'logname.log'
+ }
+ let(:additional_options) {
+ { output_logfilename: output_logfilename }
+ }
+
+ it { is_expected.to eq output_logfilename }
+ end
+
+ context 'when an output_logfilename is NOT specified' do
+ it { is_expected.to eq app_name + '.output' }
+ end
+ end
+
end
end | Add coverage for Application#output_logfilename | thuehlinger_daemons | train | rb |
144bd9f7a7aba7c86ef0fc0d7e1e620d1d658edc | diff --git a/prom/__init__.py b/prom/__init__.py
index <HASH>..<HASH> 100644
--- a/prom/__init__.py
+++ b/prom/__init__.py
@@ -23,7 +23,7 @@ from .exception import InterfaceError, Error, UniqueError
from . import utils
-__version__ = '2.4.0'
+__version__ = '2.4.1'
# get rid of "No handler found" warnings (cribbed from requests)
diff --git a/prom/query.py b/prom/query.py
index <HASH>..<HASH> 100644
--- a/prom/query.py
+++ b/prom/query.py
@@ -4,7 +4,7 @@ Classes and stuff that handle querying the interface for a passed in Orm class
"""
from __future__ import unicode_literals, division, print_function, absolute_import
import copy
-from collections import defaultdict, Mapping, OrderedDict
+from collections import defaultdict, OrderedDict
import datetime
import logging
import os | <I>. Removes rogue import that has been removed in python <I> | Jaymon_prom | train | py,py |
59bfc9a67dc8d8a0b14c7d10cd1fea37124fd964 | diff --git a/java/src/com/google/template/soy/passes/SoyElementPass.java b/java/src/com/google/template/soy/passes/SoyElementPass.java
index <HASH>..<HASH> 100644
--- a/java/src/com/google/template/soy/passes/SoyElementPass.java
+++ b/java/src/com/google/template/soy/passes/SoyElementPass.java
@@ -180,7 +180,7 @@ public final class SoyElementPass implements CompilerFileSetPass {
// If the template is a static call, then it may be a Soy element if it's the last child.
// TODO(tomnguyen): Merge this logic with velog validation pass.
- // TODO(cwgordon): There is no way to make guarantees about the root element of a dynamic
+ // TODO(user): There is no way to make guarantees about the root element of a dynamic
// call. Consider adding some way to indicate this constraint in template type declarations.
if (openTag == null
&& child instanceof CallBasicNode | Update TODO username.
We switched to a different version of MOE, which apparently doesn't have this username on the allowlist. | google_closure-templates | train | java |
6621194109d3e79a4d8fe8933614a9160ec0dc7e | diff --git a/cartoframes/dataset.py b/cartoframes/dataset.py
index <HASH>..<HASH> 100644
--- a/cartoframes/dataset.py
+++ b/cartoframes/dataset.py
@@ -175,7 +175,7 @@ class Dataset(object):
def _cartodbfy_query(self):
return "SELECT CDB_CartodbfyTable('{schema}', '{table_name}')" \
- .format(schema=self.schema or self.cc.get_default_schema(), table_name=self.table_name)
+ .format(schema=self.schema or self._get_schema(), table_name=self.table_name)
def _copyfrom(self, with_lnglat=None):
geom_col = _get_geom_col_name(self.df)
@@ -348,8 +348,8 @@ class Dataset(object):
def _get_schema(self):
if self.cc:
return self.cc.get_default_schema()
- else:
- return 'public'
+
+ return None
def recursive_read(context, query, retry_times=Dataset.DEFAULT_RETRY_TIMES): | the schema should be None in dataset without context | CartoDB_cartoframes | train | py |
69cb751f7166ae59ed5473d74f0f4ca286cd1bf7 | diff --git a/python_modules/dagster-test/dagster_test/toys/schedules.py b/python_modules/dagster-test/dagster_test/toys/schedules.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster-test/dagster_test/toys/schedules.py
+++ b/python_modules/dagster-test/dagster_test/toys/schedules.py
@@ -70,7 +70,7 @@ def backfill_test_schedule():
partition_fn=date_partition_range(
# first sunday of the year
start=datetime.datetime(2020, 1, 5),
- delta=datetime.timedelta(weeks=1),
+ delta_range="weeks",
),
run_config_fn_for_partition=lambda _: {"storage": {"filesystem": {}}},
) | [easy] Fix missing param change in date_partition_range call
Summary: I Missed a spot, and toys repo doesn't seem to be covered by automated tests. Noticed when trying to load dagit on the toys repo.
Test Plan: Load dagit with top-level workspace.yaml, no more error. Check all other date_partition_range callsites
Reviewers: prha, alangenfeld, sashank, johann
Reviewed By: prha
Differential Revision: <URL> | dagster-io_dagster | train | py |
8773c467d185c5cacac1b12196235ab76775f462 | diff --git a/hazelcast/src/test/java/com/hazelcast/internal/management/operation/GetMapConfigOperationTest.java b/hazelcast/src/test/java/com/hazelcast/internal/management/operation/GetMapConfigOperationTest.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/test/java/com/hazelcast/internal/management/operation/GetMapConfigOperationTest.java
+++ b/hazelcast/src/test/java/com/hazelcast/internal/management/operation/GetMapConfigOperationTest.java
@@ -89,7 +89,7 @@ public class GetMapConfigOperationTest extends HazelcastTestSupport {
MCGetMapConfigCodec.ResponseParameters actual = runCommand(client, hz, "map-with-index").get(ASSERT_TRUE_EVENTUALLY_TIMEOUT, SECONDS);
assertThat(actual.globalIndexes)
.usingElementComparatorIgnoringFields("name")
- .containsExactly(
+ .containsExactlyInAnyOrder(
new IndexConfig(IndexType.SORTED, "first"),
new IndexConfig(IndexType.HASH, "second")
); | Fixing #<I> (#<I>) | hazelcast_hazelcast | train | java |
e4a95fcbaf28e6adba47d7497e52f4a4d88af590 | diff --git a/src/server/pkg/ppsutil/util.go b/src/server/pkg/ppsutil/util.go
index <HASH>..<HASH> 100644
--- a/src/server/pkg/ppsutil/util.go
+++ b/src/server/pkg/ppsutil/util.go
@@ -391,7 +391,7 @@ func IsTerminal(state pps.JobState) bool {
// UpdateJobState performs the operations involved with a job state transition.
func UpdateJobState(pipelines col.ReadWriteCollection, jobs col.ReadWriteCollection, jobPtr *pps.EtcdJobInfo, state pps.JobState, reason string) error {
if jobPtr.State == pps.JobState_JOB_FAILURE {
- return nil
+ return fmt.Errorf("cannot put %q in state %s as it's already in state JOB_FAILURE", jobPtr.Job.ID, state.String())
}
// Update pipeline | Return error when updating a failed job | pachyderm_pachyderm | train | go |
774343812000dcd443b666df070e6c9a4cebbd45 | diff --git a/mockserver-netty/src/test/java/org/mockserver/proxy/socks/NettyHttpProxySOCKSIntegrationTest.java b/mockserver-netty/src/test/java/org/mockserver/proxy/socks/NettyHttpProxySOCKSIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/mockserver-netty/src/test/java/org/mockserver/proxy/socks/NettyHttpProxySOCKSIntegrationTest.java
+++ b/mockserver-netty/src/test/java/org/mockserver/proxy/socks/NettyHttpProxySOCKSIntegrationTest.java
@@ -131,6 +131,7 @@ public class NettyHttpProxySOCKSIntegrationTest {
}
@Test
+ @Ignore("Fails only on drone.io, runs correctly on travis and multiple local machines")
public void shouldProxyRequestsUsingHttpClientViaSOCKS() throws Exception {
// given
Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create() | disabling test that fails only on drone.io, but runs correctly on travis and multiple local machines | jamesdbloom_mockserver | train | java |
2098307f6992521d0c401b0945fc2e0ceab050a1 | diff --git a/gui/controls/listview.py b/gui/controls/listview.py
index <HASH>..<HASH> 100644
--- a/gui/controls/listview.py
+++ b/gui/controls/listview.py
@@ -136,7 +136,7 @@ class ListView(Control):
return
# calculate the zero-based index position (-1 like python lists)
index = max(self.get_count() + index + 1, 0) if index < 0 else index
- if isinstance(a_list, DictType):
+ if isinstance(a_list, dict):
for i, (key, a_item) in enumerate(a_list.items()):
self._items.add(index + i, key, a_item)
else:
@@ -152,7 +152,7 @@ class ListView(Control):
return self._items
def _set_items(self, a_list):
- if isinstance(a_list, NoneType):
+ if a_list is None:
a_list = []
elif not isinstance(a_list, (list, tuple, dict)):
raise AttributeError("unsupported type, list/tuple/dict expected") | fixed types import removal (preparation for 2to3) | reingart_gui2py | train | py |
068f880ce219fcc04adcd7f4a8567daa946c8307 | diff --git a/vcard.php b/vcard.php
index <HASH>..<HASH> 100644
--- a/vcard.php
+++ b/vcard.php
@@ -185,7 +185,7 @@ class VCard
$string .= "BEGIN:VEVENT\n";
$string .= "DTSTART;TZID=Europe/London:" . $dtstart . "\n";
$string .= "DTEND;TZID=Europe/London:" . $dtend . "\n";
- $string .= "SUMMARY:" . FL::msg('VCardSummaryHelp') . "\n"; // Click attached contact below to save to your contacts
+ $string .= "SUMMARY:Click attached contact below to save to your contacts\n";
$string .= "DTSTAMP:" . $dtstart . "Z\n";
$string .= "ATTACH;VALUE=BINARY;ENCODING=BASE64;FMTTYPE=text/directory;\n";
$string .= " X-APPLE-FILENAME=" . $this->filename . ".vcf:\n"; | Bugfix #1: non-declared class FL removed | jeroendesloovere_vcard | train | php |