hash (string, 40 chars) | diff (string, 131 to 26.7k chars) | message (string, 7 to 694 chars) | project (string, 5 to 67 chars) | split (1 class) | diff_languages (string, 2 to 24 chars)
---|---|---|---|---|---|
7723dde337cea0a01ad5383b14216362c51289e3 | diff --git a/AbstractBundleTestCase.php b/AbstractBundleTestCase.php
index <HASH>..<HASH> 100644
--- a/AbstractBundleTestCase.php
+++ b/AbstractBundleTestCase.php
@@ -68,7 +68,7 @@ abstract class AbstractBundleTestCase extends AbstractContainerBuilderTestCase
$extension = $this->bundle->getContainerExtension();
if (null === $extension) {
- throw new \LogicException(sprintf('Bundle "%s" does not have an extension.', get_class($this->bundle)));
+ throw new \LogicException(\sprintf('Bundle "%s" does not have an extension.', \get_class($this->bundle)));
}
$this->container->registerExtension($extension); | [Ruwork][PHPCS] native_function_invocation=true. | ruvents_ruwork-bundle-test | train | php |
0f2d85f0b2d556f2b6c70423dcebdbd29d95e3dc | diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -133,7 +133,7 @@ class Service extends AdapterService {
}, params.sequelize);
if (filters.$select) {
- q.attributes = filters.$select;
+ q.attributes = filters.$select.map(select => `${select}`);
}
const Model = this.applyScope(params);
diff --git a/test/index.test.js b/test/index.test.js
index <HASH>..<HASH> 100644
--- a/test/index.test.js
+++ b/test/index.test.js
@@ -293,6 +293,12 @@ describe('Feathers Sequelize Service', () => {
await people.remove(person.id);
});
+ it('does not allow raw attribute $select ', async () => {
+ await assert.rejects(() => people.find({
+ query: { $select: [['(sqlite_version())', 'x']] }
+ }));
+ });
+
it('hides the Sequelize error in ERROR symbol', async () => {
try {
await people.create({ | Do not allow raw attribute selects (#<I>) | feathersjs-ecosystem_feathers-sequelize | train | js,js |
f635d17030b60b0d710a2322d12e5390c655b19d | diff --git a/pandas/core/ops/docstrings.py b/pandas/core/ops/docstrings.py
index <HASH>..<HASH> 100644
--- a/pandas/core/ops/docstrings.py
+++ b/pandas/core/ops/docstrings.py
@@ -611,7 +611,7 @@ Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).
Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison
operators.
-Equivalent to `==`, `=!`, `<=`, `<`, `>=`, `>` with support to choose axis
+Equivalent to `==`, `!=`, `<=`, `<`, `>=`, `>` with support to choose axis
(rows or columns) and level for comparison.
Parameters | DOC: Fix typo of `=!` to `!=` in docstring (#<I>)
This fixes GH<I>. | pandas-dev_pandas | train | py |
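For context on the docstring being fixed above: the flexible comparison wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) are ordinary pandas methods, so the documented equivalence can be checked directly. A minimal sketch, with an invented sample frame:

```python
import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 0, 6]})

# DataFrame.ne is the flexible wrapper for the != operator.
flex = df.ne(0)      # element-wise "not equal to 0"
op = df != 0         # same comparison via the operator
assert flex.equals(op)

# Unlike the bare operator, the wrapper also accepts axis/level arguments,
# e.g. aligning a Series along the columns axis:
aligned = df.ne(pd.Series({"a": 1, "b": 4}), axis="columns")
```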
8a58f62fe03e744ba75fcdd5fbd1f057c84ff4d7 | diff --git a/pkg/proxy/userspace/proxier.go b/pkg/proxy/userspace/proxier.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/userspace/proxier.go
+++ b/pkg/proxy/userspace/proxier.go
@@ -166,9 +166,15 @@ func NewCustomProxier(loadBalancer LoadBalancer, listenIP net.IP, iptables iptab
return nil, ErrProxyOnLocalhost
}
- hostIP, err := utilnet.ChooseHostInterface()
- if err != nil {
- return nil, fmt.Errorf("failed to select a host interface: %v", err)
+ // If listenIP is given, assume that is the intended host IP. Otherwise
+ // try to find a suitable host IP address from network interfaces.
+ var err error
+ hostIP := listenIP
+ if hostIP.Equal(net.IPv4zero) {
+ hostIP, err = utilnet.ChooseHostInterface()
+ if err != nil {
+ return nil, fmt.Errorf("failed to select a host interface: %v", err)
+ }
}
err = setRLimit(64 * 1000) | proxy/userspace: honor listen IP address as host IP if given
Allows the proxier to be used on an interface that's not the default route,
otherwise hostIP gets set to the default route interface even if that's
not what the user intended.
If listen IP isn't given, falls back to previous behavior. | kubernetes_kubernetes | train | go |
3fd54cf8629456dc3c6c77e04bf44da6195acf3e | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -59,7 +59,7 @@ module.exports = function(options) {
var response = {
version: req.body.version,
- sessionAttributes: sessionAttributes,
+ sessionAttributes: (sessionAttributes || req.body.session.attributes),
response: {
shouldEndSession: options.shouldEndSession
} | Added old session attributes for backward compat | mattgj_alexa-skills | train | js |
86fe1b944d31dda0d5fb5bd236d0c48f46107fa9 | diff --git a/src/sos/Julia/kernel.py b/src/sos/Julia/kernel.py
index <HASH>..<HASH> 100644
--- a/src/sos/Julia/kernel.py
+++ b/src/sos/Julia/kernel.py
@@ -235,7 +235,7 @@ end
class sos_Julia:
- background_color = '#ff99ff'
+ background_color = '#FCEDF4'
supported_kernels = {'Julia': ['julia-0.6']}
options = {
'assignment_pattern': r'^([_A-Za-z0-9\.]+)\s*=.*$' | change the color of julia in jupyter notebook | vatlab_SoS | train | py |
024b9236cbdb7c86b5cfcceb09e4a0f846cb0a67 | diff --git a/PyPump.py b/PyPump.py
index <HASH>..<HASH> 100644
--- a/PyPump.py
+++ b/PyPump.py
@@ -102,6 +102,31 @@ class PyPump(object):
return self.feed(post)
+ def unfollow(self, nickname):
+ """ This will use the api/user/<nickname>/feed endpoint to make a unfollow activity
+ This will unfollow a user if you're following them (returning a True if you have
+ unfollowed them successfully and a False if it failed (maybe because you're not following them))
+ """
+
+ if not "@" in nickname:
+ nickname = "%s@%s" % (nickname, self.server)
+
+ # all id's for this need the acct: prefix
+ if not nickname.startswith("acct:"):
+ nickname = "acct:%s" % nickname
+
+ post = {
+ "verb":"stop-following",
+ "object":{
+ "objectType":"person",
+ "id":nickname
+ }
+ }
+
+ data = self.feed(post)
+
+ return data
+
def feed(self, data=""):
""" This uses the /api/user/<nickname>/feed endpoint. | Add unfollowing - yet to be tested | xray7224_PyPump | train | py |
b033624fab021bf5b735035032408c8d3d71f957 | diff --git a/functional/PHPUnitTest.php b/functional/PHPUnitTest.php
index <HASH>..<HASH> 100644
--- a/functional/PHPUnitTest.php
+++ b/functional/PHPUnitTest.php
@@ -189,7 +189,20 @@ class PHPUnitTest extends FunctionalTestBase
$proc = $this->paratestProc(array(
'bootstrap' => BOOTSTRAP
), $pipes);
- $this->assertContains('Call to undefined function inexistent', stream_get_contents($pipes[2]));
+ $stderr = stream_get_contents($pipes[2]);
+ $this->assertContains('Call to undefined function inexistent', $stderr);
+ }
+
+ public function testRunWithFatalRuntimeErrorWithTheWrapperRunnerOutputsError()
+ {
+ $this->path = FIXTURES . DS . 'fatal-tests' . DS . 'UnitTestWithFatalFunctionErrorTest.php';
+ $pipes = array();
+ $proc = $this->paratestProc(array(
+ 'bootstrap' => BOOTSTRAP,
+ 'runner' => 'WrapperRunner'
+ ), $pipes);
+ $stderr = stream_get_contents($pipes[2]);
+ $this->assertContains('Call to undefined function inexistent', $stderr);
}
public function testRunWithoutPathArgumentDisplaysUsage() | WrapperRunner displays error in case of Fatal | paratestphp_paratest | train | php |
b6c2f54acfd06d972102f693fbb90cc2aeac5f07 | diff --git a/libact/models/sklearn_adapter.py b/libact/models/sklearn_adapter.py
index <HASH>..<HASH> 100644
--- a/libact/models/sklearn_adapter.py
+++ b/libact/models/sklearn_adapter.py
@@ -10,6 +10,28 @@ class SklearnAdapter(Model):
----------
clf : scikit-learn classifier object instance
The classifier object that is intended to be use with libact
+
+ Examples
+ --------
+ Here is an example of using SklearnAdapter to classify the iris dataset:
+
+ .. code-block:: python
+ from sklearn import datasets
+ from sklearn.cross_validation import train_test_split
+ from sklearn.linear_model import LogisticRegression
+
+ from libact.base.dataset import Dataset
+ from libact.models import SklearnAdapter
+
+ iris = datasets.load_iris()
+ X = iris.data
+ y = iris.target
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
+
+ adapter = SklearnAdapter(LogisticRegression(random_state=1126))
+
+ adapter.train(Dataset(X_train, y_train))
+ adapter.predict(X_test)
"""
def __init__(self, clf): | added example for sklearn adapter | ntucllab_libact | train | py |
6dec1983fe83afbb15ba921d4cd36c83fa464637 | diff --git a/utool/util_str.py b/utool/util_str.py
index <HASH>..<HASH> 100755
--- a/utool/util_str.py
+++ b/utool/util_str.py
@@ -1746,7 +1746,8 @@ def list_str(list_, **listkw):
def _make_valstr(**kwargs):
import utool as ut
- strvals = kwargs.get('sv', kwargs.get('strvals', False))
+ stritems = kwargs.get('si', kwargs.get('stritems', False))
+ strvals = stritems or kwargs.get('sv', kwargs.get('strvals', False))
valfunc = six.text_type if strvals else reprfunc
if not kwargs.get('recursive', True): | fixed bug where si not cause sv to be True in repr2 | Erotemic_utool | train | py |
dd95fc0bb4ba59785fc4e0224b52d8f4649599b5 | diff --git a/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java b/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
index <HASH>..<HASH> 100644
--- a/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
+++ b/h2o-algos/src/main/java/hex/deeplearning/DeepLearningModel.java
@@ -1384,7 +1384,7 @@ public class DeepLearningModel extends SupervisedModel<DeepLearningModel,DeepLea
try {
final long now = System.currentTimeMillis();
epoch_counter = (float)model_info().get_processed_total()/train.numRows();
- final double time_last_iter_millis = now-_timeLastScoreEnter;
+ final double time_last_iter_millis = Math.max(1, now-_timeLastScoreEnter); //at least 1 msec
// Auto-tuning
// if multi-node and auto-tuning and at least 10 ms for communication (to avoid doing thins on multi-JVM on same node), | Set the time for one DL MR iteration to at least 1 ms. | h2oai_h2o-3 | train | java |
b9dbdc21f0cb53a8c1c27416c3bd25dafc0863d6 | diff --git a/tests/ClientTest.php b/tests/ClientTest.php
index <HASH>..<HASH> 100644
--- a/tests/ClientTest.php
+++ b/tests/ClientTest.php
@@ -84,6 +84,8 @@ class ClientTest extends TestCase
$client->request($inputRequest, [], true);
+ self::assertInstanceOf(RequestInterface::class, $request);
+
$this->assertSame($outputRequest->getMethod(), $request->getMethod());
$this->assertSame((string) $outputRequest->getUri(), (string) $request->getUri()); | Ensure $request is really a request | php-api-clients_transport | train | php |
8d728a4294f5ff44a41e7c010633248b86a6d85e | diff --git a/src/LfmPath.php b/src/LfmPath.php
index <HASH>..<HASH> 100644
--- a/src/LfmPath.php
+++ b/src/LfmPath.php
@@ -153,7 +153,7 @@ class LfmPath
$working_dir = $this->path('working_dir');
$parent_dir = substr($working_dir, 0, strrpos($working_dir, '/'));
- $parent_path = clone $this;
+ $parent_path = app(static::class);
$parent_path->dir($parent_dir)->setName(null);
$directories = $parent_path->directories(); | fix bug with isDirectory() in LfmPath | UniSharp_laravel-filemanager | train | php |
a63add501a3b0271c323cea9c091a7ef79d38ed5 | diff --git a/test/k8sT/DatapathConfiguration.go b/test/k8sT/DatapathConfiguration.go
index <HASH>..<HASH> 100644
--- a/test/k8sT/DatapathConfiguration.go
+++ b/test/k8sT/DatapathConfiguration.go
@@ -581,6 +581,19 @@ var _ = Describe("K8sDatapathConfig", func() {
testWireguard("cilium_vxlan")
})
+
+ It("Pod2pod is encrypted in tunneling mode with per-endpoint routes", func() {
+ deploymentManager.DeployCilium(map[string]string{
+ "tunnel": "vxlan",
+ "endpointRoutes.enabled": "true",
+ "encryption.enabled": "true",
+ "encryption.type": "wireguard",
+ "l7Proxy": "false",
+ }, DeployCiliumOptionsAndDNS)
+
+ testWireguard("cilium_vxlan")
+ })
+
})
Context("Sockops performance", func() { | test: Run WG with per-endpoint routes
Just to have confidence that nothing is broken with this setup. | cilium_cilium | train | go |
12375014bba4d79fcafe64e4499beba62a9651c1 | diff --git a/src/Models/Transaction.php b/src/Models/Transaction.php
index <HASH>..<HASH> 100644
--- a/src/Models/Transaction.php
+++ b/src/Models/Transaction.php
@@ -179,6 +179,13 @@ class Transaction
// mutate transaction (morph specialized class)
$classTx = self::$typesClassMap[$type];
+
+ if ($type === TransactionType::TRANSFER
+ && in_array($data["version"], [self::VERSION_2, self::VERSION_2_TEST, self::VERSION_2_MIJIN])) {
+
+ $classTx = "\\NEM\\Models\\Transaction\\MosaicTransfer";
+ }
+
return new $classTx($data);
} | add recognition for Transaction::create() to work with v2 transfers when explicitely told so | evias_nem-php | train | php |
ebe392ee39762086d59fa14a9d996f75edb0537e | diff --git a/inveniocfg_upgrader.py b/inveniocfg_upgrader.py
index <HASH>..<HASH> 100644
--- a/inveniocfg_upgrader.py
+++ b/inveniocfg_upgrader.py
@@ -829,7 +829,7 @@ def cmd_upgrade_check(dummy_conf, upgrader=None):
logger = upgrader.get_logger()
try:
- from invenio.flaskshell import db
+ from invenio.sqlalchemyutils import db
except ImportError:
logger.error("make check-upgrade is unfortunately not supported for "
"non-SQLAlchemy based Invenio installations") | inveniomanage: initial release
* Adds initial implementation of `upgrader_manager` and
`database_manager`.
* Adds new requirement for Flask-Script. (addresses #<I>) | inveniosoftware-attic_invenio-upgrader | train | py |
3e983834e14701ecb96f422f5b6f81eb36181699 | diff --git a/src/Kunstmaan/NodeBundle/Controller/NodeAdminController.php b/src/Kunstmaan/NodeBundle/Controller/NodeAdminController.php
index <HASH>..<HASH> 100644
--- a/src/Kunstmaan/NodeBundle/Controller/NodeAdminController.php
+++ b/src/Kunstmaan/NodeBundle/Controller/NodeAdminController.php
@@ -164,7 +164,7 @@ class NodeAdminController extends Controller
$nodeTranslation = $this->em->getRepository('KunstmaanNodeBundle:NodeTranslation')->createNodeTranslationFor($myLanguagePage, $this->locale, $node, $this->user);
$nodeVersion = $nodeTranslation->getPublicNodeVersion();
- $this->get('event_dispatcher')->dispatch(Events::ADD_EMPTY_PAGE_TRANSLATION, new NodeEvent($node, $nodeTranslation, $nodeVersion, $entityName));
+ $this->get('event_dispatcher')->dispatch(Events::ADD_EMPTY_PAGE_TRANSLATION, new NodeEvent($node, $nodeTranslation, $nodeVersion, $myLanguagePage));
return $this->redirect($this->generateUrl('KunstmaanNodeBundle_nodes_edit', array('id' => $id)));
} | Fix creation of NodeEvent object | Kunstmaan_KunstmaanBundlesCMS | train | php |
55b400c965a75daf4313d929b189561b71986fb0 | diff --git a/devices/tuya.js b/devices/tuya.js
index <HASH>..<HASH> 100644
--- a/devices/tuya.js
+++ b/devices/tuya.js
@@ -214,7 +214,8 @@ module.exports = [
{modelID: 'TS0001', manufacturerName: '_TZ3000_5ng23zjs'},
{modelID: 'TS0001', manufacturerName: '_TZ3000_rmjr4ufz'},
{modelID: 'TS0001', manufacturerName: '_TZ3000_v7gnj3ad'},
- {modelID: 'TS0001', manufacturerName: '_TZ3000_mx3vgyea'}],
+ {modelID: 'TS0001', manufacturerName: '_TZ3000_mx3vgyea'},
+ {modelID: 'TS0001', manufacturerName: '_TZ3000_qsp2pwtf'}],
model: 'WHD02',
vendor: 'TuYa',
description: 'Wall switch module', | Add _TZ<I>_qsp2pwtf to WHD<I> (#<I>) | Koenkk_zigbee-shepherd-converters | train | js |
9f63aa3ab3b1e9dcddd93d646636d33123ab4ec2 | diff --git a/core/src/main/java/io/undertow/protocols/http2/Http2FrameHeaderParser.java b/core/src/main/java/io/undertow/protocols/http2/Http2FrameHeaderParser.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/io/undertow/protocols/http2/Http2FrameHeaderParser.java
+++ b/core/src/main/java/io/undertow/protocols/http2/Http2FrameHeaderParser.java
@@ -134,6 +134,9 @@ class Http2FrameHeaderParser implements FrameHeaderData {
break;
}
case FRAME_TYPE_PRIORITY: {
+ if (streamId == 0) {
+ throw new ConnectionErrorException(Http2Channel.ERROR_PROTOCOL_ERROR, UndertowMessages.MESSAGES.streamIdMustNotBeZeroForFrameType(Http2Channel.FRAME_TYPE_PRIORITY));
+ }
parser = new Http2PriorityParser(length);
break;
} | Added also check for PRIORITY frame type | undertow-io_undertow | train | java |
eaffc7b20c56fe662df2c2179dcbd67b3ab7bcf3 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -127,6 +127,11 @@ function parse (input) {
function multipoint () {
if (!$(/^(multipoint)/i)) return null;
white();
+ var newCoordsFormat = _
+ .substring(_.indexOf('(') + 1, _.length - 1)
+ .replace(/\(/g, '')
+ .replace(/\)/g, '');
+ _ = 'MULTIPOINT (' + newCoordsFormat + ')';
var c = multicoords();
if (!c) return null;
white(); | Added support for MultiPoint coordinates encapsulated in parentheses | mapbox_wellknown | train | js |
afef3738cd8b3d91c24e784a287e7c3ca75bf8ce | diff --git a/supervisor_test.go b/supervisor_test.go
index <HASH>..<HASH> 100644
--- a/supervisor_test.go
+++ b/supervisor_test.go
@@ -280,6 +280,15 @@ func TestDoubleStart(t *testing.T) {
go func(i int) {
c := context.WithValue(ctx, "supervisor", i)
supervisor.Serve(c)
+
+ svc1.mu.Lock()
+ count := svc1.count
+ supervisors := svc1.supervisors
+ if count > 1 {
+ t.Error("wait service should have been started once:", count, "supervisor IDs:", supervisors)
+ }
+ svc1.mu.Unlock()
+
wg.Done()
}(i)
}
@@ -288,14 +297,6 @@ func TestDoubleStart(t *testing.T) {
cancel()
<-ctx.Done()
wg.Wait()
-
- svc1.mu.Lock()
- count := svc1.count
- supervisors := svc1.supervisors
- if count != 1 {
- t.Error("wait service should have been started once:", count, "supervisor IDs:", supervisors)
- }
- svc1.mu.Unlock()
}
func TestRestart(t *testing.T) { | improve success detection of TestDoubleStart | cirello-io_supervisor | train | go |
ebc597b2ba69575b54339c479bead08202b02f1f | diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -170,7 +170,7 @@ module.exports = function (grunt) {
require(__dirname + "/bin/lib/server.js")({
environment: "test",
serverRoot: __dirname + "/test/server/rest",
- documentRoot: __dirname + "test/server/public"
+ documentRoot: __dirname + "/test/server/public"
}, function(err, instance) {
if (!err) { | fixed documentRoot path in server-tests | rappid_rAppid.js | train | js |
ce7c41850ffd86081e383f5535225aba6bb1873a | diff --git a/spec/commands_spec.rb b/spec/commands_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/commands_spec.rb
+++ b/spec/commands_spec.rb
@@ -54,10 +54,10 @@ describe PryTheme::Command::PryTheme do
describe "'try' subcommand" do
it "temporary switches to a theme" do
- pry_eval('pry-theme try sick').should == "Using sick theme\n"
+ pry_eval('pry-theme try sick').should == "Using \"sick\" theme\n"
PryTheme::ThemeList.current_theme.name.should == 'sick'
- pry_eval('pry-theme try wholesome').should == "Using wholesome theme\n"
+ pry_eval('pry-theme try wholesome').should == "Using \"wholesome\" theme\n"
PryTheme::ThemeList.current_theme.name.should == 'wholesome'
end | Commands: fix tests
I merely forgot to update them. | kyrylo_pry-theme | train | rb |
0b2f271948466f958adca11b6b303d5ecb739564 | diff --git a/core-bundle/contao/dca/tl_content.php b/core-bundle/contao/dca/tl_content.php
index <HASH>..<HASH> 100644
--- a/core-bundle/contao/dca/tl_content.php
+++ b/core-bundle/contao/dca/tl_content.php
@@ -285,7 +285,7 @@ $GLOBALS['TL_DCA']['tl_content'] = array
'exclude' => true,
'search' => true,
'inputType' => 'text',
- 'eval' => array('maxlength'=>255, 'tl_class'=>'w50'),
+ 'eval' => array('maxlength'=>255, 'allowHtml'=>true, 'tl_class'=>'w50'),
'sql' => "varchar(255) NOT NULL default ''"
),
'floating' => array | [Core] Allow HTML input in image caption fields (see #<I>) | contao_contao | train | php |
89eb1d734ba81241d6357b9d2b3021f17fa56549 | diff --git a/chef/lib/chef/daemon.rb b/chef/lib/chef/daemon.rb
index <HASH>..<HASH> 100644
--- a/chef/lib/chef/daemon.rb
+++ b/chef/lib/chef/daemon.rb
@@ -44,7 +44,7 @@ class Chef
File.umask Chef::Config[:umask]
$stdin.reopen("/dev/null")
$stdout.reopen("/dev/null", "a")
- $stdout.reopen($stdout)
+ $stderr.reopen($stdout)
save_pid_file
at_exit { remove_pid_file }
rescue NotImplementedError => e | Reopen stdout and stderr, not stdout twice | chef_chef | train | rb |
0f108988b8c354c4d8f6291957c485bba5d6ba81 | diff --git a/salt/modules/zpool.py b/salt/modules/zpool.py
index <HASH>..<HASH> 100644
--- a/salt/modules/zpool.py
+++ b/salt/modules/zpool.py
@@ -353,22 +353,29 @@ def export(pool_name='', force='false'):
def import_(pool_name='', force='false'):
'''
- Import a storage pool
+ Import a storage pool or list pools available for import
CLI Example:
.. code-block:: bash
+ salt '*' zpool.import
salt '*' zpool.import myzpool [force=True|False]
'''
ret = {}
+ zpool = _check_zpool()
if not pool_name:
- ret['Error'] = 'zpool name parameter is mandatory'
+ cmd = '{0} import'.format(zpool)
+ res = __salt__['cmd.run'](cmd, ignore_retcode=True)
+ if not res:
+ ret['Error'] = 'No pools available for import'
+ else:
+ pool_list = [l for l in res.splitlines()]
+ ret['pools'] = pool_list
return ret
if exists(pool_name):
ret['Error'] = 'Storage pool {0} already exists. Import the pool under a different name instead'.format(pool_name)
else:
- zpool = _check_zpool()
if force is True:
cmd = '{0} import -f {1}'.format(zpool, pool_name)
else: | Adding ability in salt.modules.zpool.import to list available pools for import | saltstack_salt | train | py |
5b15d7da1572c2fe63741a1289ef7a53e0ad8c06 | diff --git a/src/CoreBundle/Command/RunTaskCommand.php b/src/CoreBundle/Command/RunTaskCommand.php
index <HASH>..<HASH> 100644
--- a/src/CoreBundle/Command/RunTaskCommand.php
+++ b/src/CoreBundle/Command/RunTaskCommand.php
@@ -63,17 +63,25 @@ class RunTaskCommand extends ContainerAwareCommand
return 0;
}
- $lockDir = $container->get('tenside.home')->tensideDataDir();
$lock = $container->get('tenside.taskrun_lock');
$logger->info('Acquire lock file.');
if (!$lock->lock()) {
- $logger->error('Could not acquire lock file.');
- throw new \RuntimeException(
- 'Another task appears to be running. ' .
- 'If this is not the case, please remove the lock file in ' .
- $lockDir
- );
+ $locked = false;
+ $retry = 3;
+ // Try up to 3 times to acquire with short delay in between.
+ while ($retry > 0) {
+ sleep(1000);
+ if ($locked = $lock->lock()) {
+ break;
+ }
+ }
+ if (!$locked) {
+ $logger->error('Could not acquire lock file.');
+ throw new \RuntimeException(
+ 'Another task appears to be running. If this is not the case, please remove the lock file.'
+ );
+ }
}
try { | Retry 3 times to acquire the lock in cli
This allows the GUI to release the lock after spawning the process | tenside_core | train | php |
0464524f60bd1e823cb1da889cd5a35e2ab4cc59 | diff --git a/platform_helper.py b/platform_helper.py
index <HASH>..<HASH> 100644
--- a/platform_helper.py
+++ b/platform_helper.py
@@ -19,7 +19,7 @@ import sys
def platforms():
return ['linux', 'darwin', 'freebsd', 'openbsd', 'solaris', 'sunos5',
- 'mingw', 'msvc']
+ 'mingw', 'msvc', 'gnukfreebsd8']
class Platform( object ):
def __init__( self, platform):
@@ -31,6 +31,8 @@ class Platform( object ):
self._platform = 'linux'
elif self._platform.startswith('freebsd'):
self._platform = 'freebsd'
+ elif self._platform.startswith('gnukfreebsd8'):
+ self._platform = 'freebsd'
elif self._platform.startswith('openbsd'):
self._platform = 'openbsd'
elif self._platform.startswith('solaris'): | Fixed the build on Debian/kfreebsd | ninja-build_ninja | train | py |
bfb65b733a2cfa0dac6a5760897f09d8e2557381 | diff --git a/docker/docker.go b/docker/docker.go
index <HASH>..<HASH> 100644
--- a/docker/docker.go
+++ b/docker/docker.go
@@ -40,15 +40,19 @@ func main() {
docker.NetworkBridgeIface = docker.DefaultNetworkBridge
}
- if strings.Contains(*flHost, ":") && len(strings.Split(*flHost, ":")) == 2 {
+ if strings.Contains(*flHost, ":") {
hostParts := strings.Split(*flHost, ":")
+ if len(hostParts) != 2 {
+ log.Fatal("Invalid bind address format.")
+ os.Exit(-1)
+ }
if hostParts[0] != "" {
host = hostParts[0]
}
if p, err := strconv.Atoi(hostParts[1]); err == nil {
port = p
}
- } else if !strings.Contains(*flHost, ":") {
+ } else {
host = *flHost
} | Simplify the Host flag parsing | moby_moby | train | go |
196151f86366fd384fe690b093ade804f188f10b | diff --git a/game.js b/game.js
index <HASH>..<HASH> 100644
--- a/game.js
+++ b/game.js
@@ -34,7 +34,27 @@ var Splat = (function(splat, window, document) {
});
}
+ function setCanvasSize() {
+ var ow = 1136;
+ var oh = 640;
+
+ var w = Math.min(window.innerWidth, ow);
+ var h = Math.min(window.innerHeight, oh);
+ canvas.style.width = w + "px";
+ canvas.style.height = h + "px";
+
+ if (w != ow || h != oh) {
+ canvas.width = oh / window.innerHeight * window.innerWidth;
+ canvas.height = oh;
+ }
+
+ // console.log(window.innerWidth + "x" + window.innerHeight + " - " + canvas.style.width + "x" + canvas.style.height + " - " + canvas.width + "x" + canvas.height);
+ }
+
function Game(canvas, manifest) {
+ window.addEventListener("resize", setCanvasSize);
+ setCanvasSize();
+
this.mouse = new splat.MouseInput(canvas);
this.keyboard = new splat.KeyboardInput(splat.keyMap.US); | Move canvas size management into Splat. | SplatJS_splat-ecs | train | js |
12597dedc58da64c234f1d9edb7edd17c47ecaca | diff --git a/src/stream/csv/parser.js b/src/stream/csv/parser.js
index <HASH>..<HASH> 100644
--- a/src/stream/csv/parser.js
+++ b/src/stream/csv/parser.js
@@ -124,7 +124,7 @@ var
_deduceSeparator: function () {
var header = this._header;
this._separator = _gpfArrayForEachFalsy(_gpfCsvSeparators, function (separator) {
- if (-1 !== header.indexOf(separator)) {
+ if (header.indexOf(separator) !== -1) {
return separator;
}
}) || _gpfCsvSeparators[0]; | !yoda style (#<I>) | ArnaudBuchholz_gpf-js | train | js |
b5884b5a865d51d15207dca696f9ae61cad758a5 | diff --git a/test/idempotency/C004_ComplexExpressions.java b/test/idempotency/C004_ComplexExpressions.java
index <HASH>..<HASH> 100644
--- a/test/idempotency/C004_ComplexExpressions.java
+++ b/test/idempotency/C004_ComplexExpressions.java
@@ -13,6 +13,7 @@ class C004_ComplexExpressions {
int bracedCast = ((Object) "foo").hashCode();
int chainedTernary = 1 == 2 ? 10 : 2 == 2 ? 1 : 2;
int bracedTernary = 5 + (1 == 1 ? (2 == 3 ? 5 : 7) : 8);
+ int multipleAssignments = ex = brace1 = brace2 *= chained1 += 10;
int postfixChained = ~brace1++;
double postfixChained2 = (double) brace1++;
int selfAssign = brace1;
diff --git a/test/idempotency/C009_ArrayAccess.java b/test/idempotency/C009_ArrayAccess.java
index <HASH>..<HASH> 100644
--- a/test/idempotency/C009_ArrayAccess.java
+++ b/test/idempotency/C009_ArrayAccess.java
@@ -4,7 +4,9 @@ class C009_ArrayAccess {
x[9] = 5;
int y = x[5];
int[][] z = new int[2][1];
+ z[0][0] = 1;
((z)[0])[0] = 1;
int b = ((z)[0])[0];
+ int c = z[0][0];
}
}
\ No newline at end of file | Fleshed out some samples so positions generated by toJCTree are properly tested. | rzwitserloot_lombok.ast | train | java,java |
9cd09874c2ab9ab8962e919fd2cd3b19731fe9e0 | diff --git a/backend/sublime/testdata/view_test.py b/backend/sublime/testdata/view_test.py
index <HASH>..<HASH> 100644
--- a/backend/sublime/testdata/view_test.py
+++ b/backend/sublime/testdata/view_test.py
@@ -26,9 +26,9 @@ hocus pocus
v.settings().set("test", 10)
assert v.settings().get("test") == 10
assert v.sel()[0] == (46, 46)
- v.run_command("move", {"by": "characters", "forward": True})
- assert v.sel()[0] == (47, 47)
v.run_command("move", {"by": "characters", "forward": False})
+ assert v.sel()[0] == (45, 45)
+ v.run_command("move", {"by": "characters", "forward": True})
assert v.sel()[0] == (46, 46)
except:
print(sys.exc_info()[1]) | backend/sublime: Fix broken test.
ST3 doesn't allow moving outside of the buffer. | limetext_backend | train | py |
4dd44bc5e894d5a6e7f40881f318fa67e9aa1a77 | diff --git a/lib/active_storage/service/s3_service.rb b/lib/active_storage/service/s3_service.rb
index <HASH>..<HASH> 100644
--- a/lib/active_storage/service/s3_service.rb
+++ b/lib/active_storage/service/s3_service.rb
@@ -4,8 +4,24 @@ require "active_support/core_ext/numeric/bytes"
class ActiveStorage::Service::S3Service < ActiveStorage::Service
attr_reader :client, :bucket
- def initialize(access_key_id:, secret_access_key:, region:, bucket:)
- @client = Aws::S3::Resource.new(access_key_id: access_key_id, secret_access_key: secret_access_key, region: region)
+ def initialize(access_key_id:, secret_access_key:, region:, bucket:, endpoint: nil)
+ @client = if endpoint
+ Aws::S3::Resource.new(
+ access_key_id: access_key_id,
+ secret_access_key: secret_access_key,
+ region: region,
+ bucket: bucket
+ )
+ else
+ Aws::S3::Resource.new(
+ access_key_id: access_key_id,
+ secret_access_key: secret_access_key,
+ region: region,
+ bucket: bucket,
+ endpoint: endpoint
+ )
+ end
+
@bucket = @client.bucket(bucket)
end | Allow custom endpoints for S3. | rails_rails | train | rb |
1896bcc043d3f389169e9859788fe5ce519e6194 | diff --git a/lib/mongodb-persister/mongoPersister.js b/lib/mongodb-persister/mongoPersister.js
index <HASH>..<HASH> 100644
--- a/lib/mongodb-persister/mongoPersister.js
+++ b/lib/mongodb-persister/mongoPersister.js
@@ -219,7 +219,7 @@ exports.listChannel = function (_app, _userId, done) {
exports.getChannel = function (_app, _channel, done) {
- Channel.find(
+ Channel.findOne(
{ 'app': _app,
'channel': _channel },
function (err, channel) { | issue#<I> is find -> findOne | xpush_node-xpush | train | js |
1a352d0b210add5edef7db1b2270aa4d1132cab7 | diff --git a/lib/bugsnag/integrations/delayed_job.rb b/lib/bugsnag/integrations/delayed_job.rb
index <HASH>..<HASH> 100644
--- a/lib/bugsnag/integrations/delayed_job.rb
+++ b/lib/bugsnag/integrations/delayed_job.rb
@@ -40,6 +40,7 @@ unless defined? Delayed::Plugins::Bugsnag
}
p[:object][:id] = object.id if object.respond_to?(:id)
end
+ add_active_job_details(p, payload)
overrides[:job][:payload] = p
end
@@ -54,6 +55,16 @@ unless defined? Delayed::Plugins::Bugsnag
super if defined?(super)
end
+
+ def add_active_job_details(p, payload)
+ if payload.respond_to?(:job_data) && payload.job_data.respond_to?(:[])
+ [:job_class, :job_id, :arguments].each do |key|
+ if (value = payload.job_data[key.to_s])
+ p[key] = value
+ end
+ end
+ end
+ end
end
callbacks do |lifecycle| | record information about ActiveJobs inside DelayedJobs | bugsnag_bugsnag-ruby | train | rb |
57e5474143d02e77b0c8279d755c58e0c8b7d91e | diff --git a/yii/console/controllers/AssetController.php b/yii/console/controllers/AssetController.php
index <HASH>..<HASH> 100644
--- a/yii/console/controllers/AssetController.php
+++ b/yii/console/controllers/AssetController.php
@@ -203,12 +203,16 @@ class AssetController extends Controller
$assetManager = $this->getAssetManager();
foreach ($bundle->depends as $dependencyName) {
if (!array_key_exists($dependencyName, $result)) {
+ if ($result[$dependencyName] === false) {
+ throw new Exception("A circular dependency is detected for target '{$dependencyName}'.");
+ }
$dependencyBundle = $assetManager->getBundle($dependencyName);
if ($dependencyBundle === null) {
throw new Exception("Unable to load dependency bundle '{$dependencyName}' for bundle '{$name}'.");
} else {
- $result[$dependencyName] = $dependencyBundle;
+ $result[$dependencyName] = false;
$this->loadBundleDependency($dependencyName, $dependencyBundle, $result);
+ $result[$dependencyName] = $dependencyBundle;
}
}
} | Method "yii\console\controllers\AssetController::loadBundleDependency()" has been improved allowing dection of the circular dependency. | yiisoft_yii2-debug | train | php |
15bafdb5183310bf1259d5f4cc2249578100c016 | diff --git a/lib/wechat/api_loader.rb b/lib/wechat/api_loader.rb
index <HASH>..<HASH> 100644
--- a/lib/wechat/api_loader.rb
+++ b/lib/wechat/api_loader.rb
@@ -72,11 +72,11 @@ module Wechat
private_class_method def self.config_from_file
if defined?(::Rails)
- config_file = ENV['WECHAT_CONF_FILE'] || Rails.root.join('config/wechat.yml')
+ config_file = ENV['WECHAT_CONF_FILE'] || Rails.root.join('config', 'wechat.yml')
resolve_config_file(config_file, Rails.env.to_s)
else
- rails_config_file = ENV['WECHAT_CONF_FILE'] || File.join(Dir.getwd, 'config/wechat.yml')
- application_config_file = File.join(Dir.getwd, 'config/application.yml')
+ rails_config_file = ENV['WECHAT_CONF_FILE'] || File.join(Dir.getwd, 'config', 'wechat.yml')
+ application_config_file = File.join(Dir.getwd, 'config', 'application.yml')
home_config_file = File.join(Dir.home, '.wechat.yml')
if File.exist?(rails_config_file)
rails_env = ENV['RAILS_ENV'] || 'development' | Fix #<I>, don't using '/' if possible. | Eric-Guo_wechat | train | rb |
ffd262e3f1830d7f3ee32da098509eb2091e3211 | diff --git a/lib/excon/ssl_socket.rb b/lib/excon/ssl_socket.rb
index <HASH>..<HASH> 100644
--- a/lib/excon/ssl_socket.rb
+++ b/lib/excon/ssl_socket.rb
@@ -49,8 +49,8 @@ module Excon
@socket.sync_close = true
if @proxy
- @socket << "CONNECT " << @connection_params[:host] << ":" << @connection_params[:port] << HTTP_1_1
- @socket << "Host: " << @connection_params[:host] << ":" << @connection_params[:port] << CR_NL << CR_NL
+ @socket << "CONNECT " << @connection_params[:host] << ":" << @connection_params[:port] << Excon::Connection::HTTP_1_1
+ @socket << "Host: " << @connection_params[:host] << ":" << @connection_params[:port] << Excon::Connection::CR_NL << Excon::Connection::CR_NL
# eat the proxy's connection response
while line = @socket.readline.strip
@@ -70,4 +70,4 @@ module Excon
end
end
-end
\ No newline at end of file
+end | Fix constant lookup on <I>
On <I> HTTP_1_1 and CR_LN can't be found directly like that, we have
to use the proper path. | excon_excon | train | rb |
ea78cf8d071ed0c082d89a86c016a17ba4c80611 | diff --git a/src/User/UserInterface.php b/src/User/UserInterface.php
index <HASH>..<HASH> 100644
--- a/src/User/UserInterface.php
+++ b/src/User/UserInterface.php
@@ -12,4 +12,4 @@ interface UserInterface extends \Symfony\Component\Security\Core\User\UserInterf
public function addRole($role);
public function removeRole($role);
-}
\ No newline at end of file
+} | Add missing newline to UserInterface :upside_down_face: | kleijnweb_jwt-bundle | train | php |
535ee9981d1d9eec61fcf6447cbba919d98a5b8c | diff --git a/bot/bot.py b/bot/bot.py
index <HASH>..<HASH> 100644
--- a/bot/bot.py
+++ b/bot/bot.py
@@ -38,7 +38,7 @@ class Bot:
def run(self):
self.logger.info(
- "Started",
+ "Starting",
"async: {async}".format(async=self.config.async()),
"Reusing connections: {reuse_connections}".format(reuse_connections=self.config.reuse_connections()),
"debug: {debug}".format(debug=self.config.debug()),
@@ -65,6 +65,7 @@ class Bot:
def process_pending_updates(self):
while self.get_and_process_updates(self.api.get_pending_updates) is not None:
pass
+ self.logger.info("Started", "All pending updates processed.")
def process_normal_updates(self):
while True: | Send a info message when all pending updates have been processed | alvarogzp_telegram-bot-framework | train | py |
1902db713e99c0c4bc38042e039328c9fbf7f2e1 | diff --git a/shared/chat/inbox/container/normal.js b/shared/chat/inbox/container/normal.js
index <HASH>..<HASH> 100644
--- a/shared/chat/inbox/container/normal.js
+++ b/shared/chat/inbox/container/normal.js
@@ -61,8 +61,8 @@ const getSmallRows = memoize(
const sortByTeamChannel = (a, b) =>
a.teamname === b.teamname
- ? a.channelname.localeCompare(b.channelname)
- : a.teamname.localeCompare(b.teamname)
+ ? a.channelname.localeCompare(b.channelname, undefined, {sensitivity: 'base'})
+ : a.teamname.localeCompare(b.teamname) // team names are normalized to lowercase
const getBigRows = memoize(
bigMetas => {
let lastTeam: ?string | android: Fix channel sorting (#<I>) | keybase_client | train | js |
30cfdc29edc7415f7d63be108f3312389c2df202 | diff --git a/tools/tinytest-codegen.py b/tools/tinytest-codegen.py
index <HASH>..<HASH> 100755
--- a/tools/tinytest-codegen.py
+++ b/tools/tinytest-codegen.py
@@ -50,7 +50,7 @@ test_dirs = ('basics', 'micropython', 'float', 'extmod', 'inlineasm') # 'import'
exclude_tests = (
'float/float2int_doubleprec.py', # requires double precision floating point to work
'inlineasm/asmfpaddsub.py', 'inlineasm/asmfpcmp.py', 'inlineasm/asmfpldrstr.py', 'inlineasm/asmfpmuldiv.py', 'inlineasm/asmfpsqrt.py',
- 'extmod/ticks_diff.py', 'extmod/time_ms_us.py',
+ 'extmod/ticks_diff.py', 'extmod/time_ms_us.py', 'extmod/uheapq_timeq.py',
'extmod/machine_pinbase.py', 'extmod/machine_pulse.py',
'extmod/vfs_fat_ramdisk.py', 'extmod/vfs_fat_fileio.py', 'extmod/vfs_fat_fsusermount.py', 'extmod/vfs_fat_oldproto.py',
) | tools/tinytest-codegen: Blacklist recently added uheapq_timeq test (qemu-arm). | micropython_micropython | train | py |
d44856612d7607cfdc4ce2538410e6222e001a3d | diff --git a/tests/test_subset.py b/tests/test_subset.py
index <HASH>..<HASH> 100644
--- a/tests/test_subset.py
+++ b/tests/test_subset.py
@@ -673,7 +673,7 @@ class TestSubsetShape:
def test_mask_multiregions(self):
ds = xr.open_dataset(self.nc_file)
regions = gpd.read_file(self.multi_regions_geojson)
-
+ regions.set_index("id")
mask = subset.create_mask(
x_dim=ds.lon, y_dim=ds.lat, poly=regions, wrap_lons=True
)
diff --git a/xclim/subset.py b/xclim/subset.py
index <HASH>..<HASH> 100644
--- a/xclim/subset.py
+++ b/xclim/subset.py
@@ -342,7 +342,7 @@ def create_mask(
"""
# Check for intersections
for i, (inda, pola) in enumerate(poly.iterrows()):
- for (indb, polb) in poly.loc[i + 1 :].iterrows():
+ for (indb, polb) in poly.iloc[i + 1 :].iterrows():
if pola.geometry.intersects(polb.geometry):
warnings.warn(
f"List of shapes contains overlap between {inda} and {indb}. Only {inda} will be used.", | Fix create_mask (#<I>)
Fix a typo that caused an index error in create_mask. | Ouranosinc_xclim | train | py,py |
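For context on the loc/iloc change above: once a GeoDataFrame carries a non-default index (for example string ids), integer slicing has to be positional. A minimal sketch with an invented frame:

```python
import pandas as pd

# Index labels are strings, similar to a shapefile indexed by an "id" column.
poly = pd.DataFrame({"name": ["a", "b", "c"]}, index=["r1", "r2", "r3"])

i = 0
# iloc slices by position, so this means "every row after row i":
rest = poly.iloc[i + 1:]        # rows "r2" and "r3"

# loc slices by label; poly.loc[i + 1:] would try to use the integer 1 as an
# index label and fails on this string index, which matches the index error
# described in the commit message.
```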
0dc18655d6729c327a5504a7e524ab000ca13c30 | diff --git a/securesystemslib/gpg/common.py b/securesystemslib/gpg/common.py
index <HASH>..<HASH> 100644
--- a/securesystemslib/gpg/common.py
+++ b/securesystemslib/gpg/common.py
@@ -222,6 +222,7 @@ def parse_pubkey_bundle(data):
elif packet_type in [PACKET_TYPE_USER_ID, PACKET_TYPE_USER_ATTR,
PACKET_TYPE_SUB_KEY]:
key_bundle[packet_type][packet] = {
+ "header_len": header_len,
"body_len": body_len,
"signatures": []
} | Update gpg raw pubkey bundle data format
Also store the header length in raw gpg pubkey bundles for testing
purposes. | secure-systems-lab_securesystemslib | train | py |
3bc72581b593778539c6bc0c2e3f70987dcfb66f | diff --git a/nota/notaclass.py b/nota/notaclass.py
index <HASH>..<HASH> 100644
--- a/nota/notaclass.py
+++ b/nota/notaclass.py
@@ -462,6 +462,7 @@ class Nota:
self.con.execute("INSERT INTO notekeyword(noteId, keywordID) VALUES(?, ?)", [noteId, keywordId])
# Handle attachments, which must be existing files.
attachments = [key.lstrip().rstrip() for key in attachments.split(',')]
+ attachments = filter(None, attachments) # remove blanks
if not self.attachments_in_db:
if len(attachments) > 0:
try: | was bad if no attachment (now bad *if*) | dankelley_nota | train | py |
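The edge case behind "was bad if no attachment" comes from how str.split behaves on an empty string: it still returns one blank element, so the resulting list is never empty. A standalone sketch, not the project's code:

```python
attachments = ""                                   # no attachments supplied
parts = [key.strip() for key in attachments.split(',')]
print(parts)                                       # [''] : one blank entry

# Dropping falsy entries restores the genuinely-empty case:
parts = list(filter(None, parts))
print(parts)                                       # []
```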
13a6dd0e5423ffd4150f3c93800f74ea3f32a8a9 | diff --git a/commitizen/commands/init.py b/commitizen/commands/init.py
index <HASH>..<HASH> 100644
--- a/commitizen/commands/init.py
+++ b/commitizen/commands/init.py
@@ -35,7 +35,6 @@ class Init:
out.info("cz bump --changelog")
out.success("The configuration are all set.")
else:
- # TODO: handle the case that config file exist but no value
out.line(f"Config file {self.config.path} already exists")
def _ask_config_path(self) -> str: | docs(init): remove unneeded TODO (the feature has been implemented) | Woile_commitizen | train | py |
b05ba080e2aba78d8b03ae1fe01d7c1b7fecc734 | diff --git a/events.go b/events.go
index <HASH>..<HASH> 100644
--- a/events.go
+++ b/events.go
@@ -42,6 +42,7 @@ const (
TAG = iota + 1
PUSH
FETCH
+ PUSH_FORCE
)
func (e EventType) String() string {
@@ -50,6 +51,8 @@ func (e EventType) String() string {
return "tag"
case PUSH:
return "push"
+ case PUSH_FORCE:
+ return "push-force"
case FETCH:
return "fetch"
}
@@ -67,6 +70,8 @@ func (e EventType) UnmarshalJSON(data []byte) error {
e = TAG
case "push":
e = PUSH
+ case "push-force":
+ e = PUSH_FORCE
case "fetch":
e = FETCH
default: | Leave PUSH_FORCE variable for backwards compatibility | AaronO_go-git-http | train | go |
1a21ead71c555a278371edcb512031a5536b7b10 | diff --git a/lib/linkedin/version.rb b/lib/linkedin/version.rb
index <HASH>..<HASH> 100644
--- a/lib/linkedin/version.rb
+++ b/lib/linkedin/version.rb
@@ -1,3 +1,3 @@
module LinkedIn
- VERSION = '0.0.11'
+ VERSION = '0.0.12'
end | Bump from <I> to <I> | bobbrez_linkedin2 | train | rb |
579f4577b3943bd8a21ad95be377bdf7d42fece7 | diff --git a/conn/command_append_test.go b/conn/command_append_test.go
index <HASH>..<HASH> 100644
--- a/conn/command_append_test.go
+++ b/conn/command_append_test.go
@@ -28,6 +28,8 @@ var _ = Describe("APPEND Command", func() {
// Ensure that the email was indeed appended
mbox := tConn.User.Mailboxes()[0]
Expect(mbox.Messages()).To(Equal(uint32(4)))
+ Expect(mbox.NextUID()).To(Equal(uint32(14)))
+
msg := mbox.MessageByUID(13)
Expect(msg.Header().Get("From")).To(Equal("[email protected]"))
Expect(msg.Header().Get("To")).To(Equal("[email protected]")) | Ensure that NextUID is incremented after APPEND
#<I> | jordwest_imap-server | train | go |
42bfbd13b05c7a9e00a92ef5886592e753a1b9a5 | diff --git a/pfr/finders/GamePlayFinder.py b/pfr/finders/GamePlayFinder.py
index <HASH>..<HASH> 100644
--- a/pfr/finders/GamePlayFinder.py
+++ b/pfr/finders/GamePlayFinder.py
@@ -63,7 +63,12 @@ def GamePlayFinder(**kwargs):
# except Exception as e:
# # if parsing goes wrong, return empty DataFrame
# raise e
- # return pd.DataFrame()
+ # return pd.DataFrame(columns=cols)
+
+ plays['Year'] = plays.Date.str[:4].astype(int)
+ plays['Month'] = plays.Date.str[4:6].astype(int)
+ plays['Date'] = plays.Date.str[6:8].astype(int)
+ plays = plays.rename({'Date': 'Boxscore'})
return plays | added year, month, day to GPF results | mdgoldberg_sportsref | train | py |
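A short sketch of the vectorised string slicing used above to split a YYYYMMDD column, with toy data and illustrative column names:

```python
import pandas as pd

plays = pd.DataFrame({"Date": ["20161002", "20161009"]})

# .str slicing applies element-wise across the whole column:
plays["Year"] = plays.Date.str[:4].astype(int)      # 2016, 2016
plays["Month"] = plays.Date.str[4:6].astype(int)    # 10, 10
plays["Day"] = plays.Date.str[6:8].astype(int)      # 2, 9

# Side note on rename: DataFrame.rename maps index labels unless columns= is
# given, and it returns a new frame rather than modifying in place.
plays = plays.rename(columns={"Date": "Boxscore"})
```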
b4c7c058391cf59c2b41fdb85453f2cbd73db4cd | diff --git a/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/GroupService.java b/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/GroupService.java
index <HASH>..<HASH> 100644
--- a/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/GroupService.java
+++ b/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/GroupService.java
@@ -53,6 +53,7 @@ public class GroupService
public static final Set<String> DEFAULT_ROLES = ImmutableSet.of(MANAGER, EDITOR, VIEWER);
+ @SuppressWarnings("squid:S00107")
GroupService(GroupFactory groupFactory, RoleFactory roleFactory, PackageFactory packageFactory,
DataService dataService, GroupMetadata groupMetadata, RoleMembershipService roleMembershipService,
RoleMetadata roleMetadata, RoleMembershipMetadata roleMembershipMetadata) | Suppress to many args warning, for now this service needs the dependencies | molgenis_molgenis | train | java |
31d625c0ca5170937a46a9154cd67cf4c9cd5ea6 | diff --git a/lib/imapSimple.js b/lib/imapSimple.js
index <HASH>..<HASH> 100644
--- a/lib/imapSimple.js
+++ b/lib/imapSimple.js
@@ -144,6 +144,7 @@ ImapSimple.prototype.search = function (searchCriteria, fetchOptions, callback)
function fetchOnMessage(message, seqNo) {
getMessage(message).then(function (message) {
+ message.seqNo = seqNo;
messages[seqNo] = message;
messagesRetrieved++; | Add seqno to message, so it can be used when identify the message when it got deleted and the expunge event is received | chadxz_imap-simple | train | js |
bc13001533208d83af490cb64449e804cb4cb91b | diff --git a/src/Psalm/Codebase.php b/src/Psalm/Codebase.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Codebase.php
+++ b/src/Psalm/Codebase.php
@@ -1659,7 +1659,7 @@ class Codebase
null,
null,
$function_name,
- $function_name . '()',
+ $function_name . (count($function->params) !== 0 ? '($0)' : '()'),
null
);
} | Put cursor in first param position | vimeo_psalm | train | php |
83531672f79ed150a975eb101cfad9227950f112 | diff --git a/animatplot/blocks/image_like.py b/animatplot/blocks/image_like.py
index <HASH>..<HASH> 100644
--- a/animatplot/blocks/image_like.py
+++ b/animatplot/blocks/image_like.py
@@ -60,7 +60,7 @@ class Pcolormesh(Block):
return self.quad
def __len__(self):
- return self.C.shape[2]
+ return self.C.shape[self.t_axis]
def _make_pcolormesh_slice(self, i, dim):
if self._is_list: | Fix Pcolormesh.__len__
Currently the code assumes that the last dimension is the "time axis". Replace with `self.t_axis` as in Imagesc. | t-makaro_animatplot | train | py |
d34ee3479151f6b7adb547ce47fc028b17984bbe | diff --git a/test/buddha_test.js b/test/buddha_test.js
index <HASH>..<HASH> 100644
--- a/test/buddha_test.js
+++ b/test/buddha_test.js
@@ -2,7 +2,8 @@ var vows = require('vows'),
assert = require('assert'),
buddha = require('../lib/buddha.js');
-vows.describe('testing buddha test credentials').addBatch({
+vows.describe('buddha')
+.addBatch({
'with valid credentials': {
topic: {
email: "[email protected]",
@@ -22,6 +23,19 @@ vows.describe('testing buddha test credentials').addBatch({
teardown : function(b) {
b.setCredentials(null, null);
}
+ },
+ 'calling setOnBehalfOf' : {
+ topic: function(credentials) {
+ buddha.setOnBehalfOf(credentials.email);
+ return buddha;
+ },
+ 'should set onBehalfOf' : function(b) {
+ assert.equal(b.onBehalfOf, "[email protected]");
+ },
+ teardown : function(b) {
+ b.setOnBehalfOf(null);
+ }
}
+
}
}).export(module); | 'added tests for setOnBehalfOf' | roylines_node-buddha | train | js |
4d2c593a5d19e1c77ea9567d2a9972267aca5246 | diff --git a/check.js b/check.js
index <HASH>..<HASH> 100644
--- a/check.js
+++ b/check.js
@@ -18,7 +18,7 @@ function printHumanReadableReport(issues) {
);
console.log(`[${issue.severity}] ${issue.title}`);
console.log(issue.items.map(item => item.report).join("\n"));
- })
+ });
console.log(
`--------------------------------------------------`
@@ -35,6 +35,9 @@ function printJsonReport(issues) {
npmFacade.runNpmCommand('audit', { ignoreExit: true })
.then(input => {
+ if (input.error) {
+ throw new Error(`'npm audit' failed with ${input.error.code}. Check the log above for more details.`);
+ }
if (!argv.json) {
console.log(`Total of ${input.actions.length} actions to process`);
} | Added simple check to handle error instead breaking 2 lines below. | naugtur_npm-audit-resolver | train | js |
f3afdd1ff0b19f65fa3b04f08956e406c0024585 | diff --git a/kubetest/local.go b/kubetest/local.go
index <HASH>..<HASH> 100644
--- a/kubetest/local.go
+++ b/kubetest/local.go
@@ -213,6 +213,14 @@ func (n localCluster) TestSetup() error {
}
func (n localCluster) Down() error {
+ processes := []string{
+ "cloud-controller-manager",
+ "hyperkube", // remove hyperkube when it is removed from local-up-cluster.sh
+ "kube-controller-manager",
+ "kube-proxy",
+ "kube-scheduler",
+ "kubelet",
+ }
// create docker client
cli, err := client.NewEnvClient()
if err != nil {
@@ -220,9 +228,9 @@ func (n localCluster) Down() error {
}
// make sure all containers are removed
removeAllContainers(cli)
- err = control.FinishRunning(exec.Command("pkill", "hyperkube"))
+ err = control.FinishRunning(exec.Command("pkill", processes...))
if err != nil {
- log.Printf("unable to kill hyperkube processes: %v", err)
+ log.Printf("unable to kill kubernetes processes: %v", err)
}
err = control.FinishRunning(exec.Command("pkill", "etcd"))
if err != nil { | Remove additional processes for local-up-cluster
Change-Id: Idb<I>d7cabad<I>ba3d2f8d<I>dde<I>f0dc9 | kubernetes_test-infra | train | go |
0051c89cba02d55236c913ce0110f7d5111ba436 | diff --git a/airflow/providers/google/cloud/transfers/sql_to_gcs.py b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/google/cloud/transfers/sql_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/sql_to_gcs.py
@@ -115,7 +115,6 @@ class BaseSQLToGCSOperator(BaseOperator):
self.parameters = parameters
self.gcp_conn_id = gcp_conn_id
self.delegate_to = delegate_to
- self.parameters = parameters
def execute(self, context):
self.log.info("Executing query") | nitpick fix (#<I>) | apache_airflow | train | py |
b0ade1bfa79d9efa8e52fb5a28d578c5b2d94595 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -123,6 +123,7 @@ setup(name='allennlp',
'scipy',
'pytz==2017.3',
'unidecode',
+ 'matplotlib==2.2.3',
'pytest',
'flaky',
'responses>=0.7', | add matplotlib to setup.py (#<I>)
😞 😞 | allenai_allennlp | train | py |
217feb8e11219f7d43785b324535ac376f8fc8e7 | diff --git a/Classes/Neos/Neos/Ui/Domain/Model/Changes/Property.php b/Classes/Neos/Neos/Ui/Domain/Model/Changes/Property.php
index <HASH>..<HASH> 100644
--- a/Classes/Neos/Neos/Ui/Domain/Model/Changes/Property.php
+++ b/Classes/Neos/Neos/Ui/Domain/Model/Changes/Property.php
@@ -159,7 +159,7 @@ class Property extends AbstractChange
$reloadIfChangedConfigurationPath = sprintf('properties.%s.ui.reloadIfChanged', $propertyName);
if ($node->getNodeType()->getConfiguration($reloadIfChangedConfigurationPath)) {
- if ($this->getNodeDomAddress()->getFusionPath()) {
+ if ($this->getNodeDomAddress()->getFusionPath() && $node->getParent()->getNodeType()->isOfType('Neos.Neos:ContentCollection')) {
$reloadContentOutOfBand = new ReloadContentOutOfBand();
$reloadContentOutOfBand->setNode($node);
$reloadContentOutOfBand->setNodeDomAddress($this->getNodeDomAddress()); | BUGFIX: only reload out of band direct children of CCs | neos_neos-ui | train | php |
a25839758841cb27f08311e497459634aa9c4aec | diff --git a/src/request_handlers/webelement_request_handler.js b/src/request_handlers/webelement_request_handler.js
index <HASH>..<HASH> 100644
--- a/src/request_handlers/webelement_request_handler.js
+++ b/src/request_handlers/webelement_request_handler.js
@@ -143,9 +143,9 @@ ghostdriver.WebElementReqHand = function(id, session) {
},
_getAttributeCommand = function(req, res) {
- var attributeValueAtom = require("./webdriver_atoms.js").get("get_attribute_value");
- var attributeName = req.urlParsed.file;
- var response = _session.getCurrentWindow().evaluate(attributeValueAtom, _getJSON(), attributeName);
+ var attributeValueAtom = require("./webdriver_atoms.js").get("get_attribute_value"),
+ attributeName = req.urlParsed.file,
+ response = _session.getCurrentWindow().evaluate(attributeValueAtom, _getJSON(), attributeName);
res.respondBasedOnResult(_session, req, response);
}, | Grouped multiple var declarations. | detro_ghostdriver | train | js |
b20959c21418e2843d440a59999a7f9653135882 | diff --git a/lib/dpl/version.rb b/lib/dpl/version.rb
index <HASH>..<HASH> 100644
--- a/lib/dpl/version.rb
+++ b/lib/dpl/version.rb
@@ -1,3 +1,3 @@
module DPL
- VERSION = '1.8.35'
+ VERSION = '1.8.36'
end | Bump to version <I> | travis-ci_dpl | train | rb |
1544fc15ff190161f395bbe137a23de498c32315 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -96,7 +96,7 @@ Vodo.prototype.updateAPI = function () {
json: true,
timeout: 10000
})
- .then(res => {
+ .then(function(res) {
let data = res.data
/*
data = _.map (helpers.formatForButter(data), function (item) {
@@ -183,7 +183,7 @@ Vodo.prototype.random = function () {
};
axios(apiUrl[index], options)
- .then( res => {
+ .then(function(res) {
let data = res.data
if (res.statusCode >= 400 || (data && !data.data)) {
console.error('Vodo API endpoint \'%s\' failed.', apiUrl); | Change API response handler
Butter unable to finish loading due to "Unexpected token =>" error.
This change should fix the issue without introducing a new behaviour. | butterproviders_butter-provider-vodo | train | js |
ca180441584ebaff128bf21ef7893ee6e6422202 | diff --git a/client/html/themes/default/aimeos.js b/client/html/themes/default/aimeos.js
index <HASH>..<HASH> 100644
--- a/client/html/themes/default/aimeos.js
+++ b/client/html/themes/default/aimeos.js
@@ -2337,7 +2337,6 @@ jQuery(function() {
var rtl = $('html').attr("dir") == 'rtl';
$('.product .image-single').slick({
- asNavFor: '.product .thumbs',
slidesToShow: 1,
slidesToScroll: 1,
rtl: rtl, | Workaround for Slick thumb slider problem | aimeos_ai-client-html | train | js |
df4ee82642b8bdc043b21682f12e992c34e40fed | diff --git a/influxdb/tests/client_test.py b/influxdb/tests/client_test.py
index <HASH>..<HASH> 100644
--- a/influxdb/tests/client_test.py
+++ b/influxdb/tests/client_test.py
@@ -710,6 +710,28 @@ class TestInfluxDBClient(unittest.TestCase):
self.assertListEqual(self.cli.get_list_users(), [])
+ def test_grant_admin_privileges(self):
+ example_response = '{"results":[{}]}'
+
+ with requests_mock.Mocker() as m:
+ m.register_uri(
+ requests_mock.GET,
+ "http://localhost:8086/query",
+ text=example_response
+ )
+ self.cli.grant_admin_privileges('test')
+
+ self.assertEqual(
+ m.last_request.qs['q'][0],
+ 'grant all privileges to test'
+ )
+
+ @raises(Exception)
+ def test_grant_admin_privileges_invalid(self):
+ cli = InfluxDBClient('host', 8086, 'username', 'password')
+ with _mocked_session(cli, 'get', 400):
+ self.cli.grant_admin_privileges('')
+
def test_revoke_admin_privileges(self):
example_response = '{"results":[{}]}' | Add tests for grant_admin_privileges()
These tests simply check for correct query generation and exception
handling if used incorrectly by passing in an empty string as username. | influxdata_influxdb-python | train | py |
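For context on the testing pattern used in the diff above, a minimal requests_mock sketch; the URL and query text here are placeholders rather than the library's real traffic:

```python
import requests
import requests_mock

with requests_mock.Mocker() as m:
    m.register_uri(requests_mock.GET, "http://localhost:8086/query",
                   text='{"results":[{}]}')

    # Whatever the code under test sends is captured by the mocker...
    requests.get("http://localhost:8086/query",
                 params={"q": "grant all privileges to test"})

    # ...so the generated query string can be asserted after the call:
    assert m.last_request.qs["q"][0] == "grant all privileges to test"
```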
6a896a3e30c243a34fb3d08ab89c17b3e8791763 | diff --git a/pickfirst.go b/pickfirst.go
index <HASH>..<HASH> 100644
--- a/pickfirst.go
+++ b/pickfirst.go
@@ -125,7 +125,7 @@ func (b *pickfirstBalancer) Close() {
}
func (b *pickfirstBalancer) ExitIdle() {
- if b.state == connectivity.Idle {
+ if b.sc != nil && b.state == connectivity.Idle {
b.sc.Connect()
}
} | pickfirst: check b.sc before calling Connect (#<I>) | grpc_grpc-go | train | go |
32f7637fc20ba0eeaa2560914c1c12bf60ab4bcb | diff --git a/bundles/org.eclipse.orion.client.editor/web/orion/textview/textView.js b/bundles/org.eclipse.orion.client.editor/web/orion/textview/textView.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.editor/web/orion/textview/textView.js
+++ b/bundles/org.eclipse.orion.client.editor/web/orion/textview/textView.js
@@ -5552,13 +5552,15 @@ define("orion/textview/textView", ['orion/textview/textModel', 'orion/textview/k
},
_setWrapMode: function (wrapMode, init) {
this._wrapMode = wrapMode;
- var clientDiv = this._clientDiv;
+ var clientDiv = this._clientDiv, viewDiv = this._viewDiv;
if (wrapMode) {
clientDiv.style.whiteSpace = "pre-wrap"; //$NON-NLS-0$
clientDiv.style.wordWrap = "break-word"; //$NON-NLS-0$
+ viewDiv.style.overflowX = "hidden"; //$NON-NLS-0$
} else {
clientDiv.style.whiteSpace = "pre"; //$NON-NLS-0$
clientDiv.style.wordWrap = "normal"; //$NON-NLS-0$
+ viewDiv.style.overflowX = "auto"; //$NON-NLS-0$
}
if (!init) {
this.redraw(); | text view should not have horizontal scrollbar in wrap mode | eclipse_orion.client | train | js |
a47318682199f371401fa16624511ab4209e36be | diff --git a/lib/firefox.js b/lib/firefox.js
index <HASH>..<HASH> 100644
--- a/lib/firefox.js
+++ b/lib/firefox.js
@@ -1,10 +1,10 @@
var exec = require("child_process").exec;
var defer = require("when").defer;
+var extend = require("underscore").extend;
var createProfile = require("./profile");
var console = require("./utils").console;
var normalizeBinary = require("./utils").normalizeBinary;
var getID = require("jetpack-id");
-
var TEST_RESULTS_REGEX = /\d+ of \d+ tests passed/i
/**
@@ -53,9 +53,14 @@ function runFirefox (manifest, options) {
console.log("Executing Firefox with args: " + args);
}
+ var env = extend({}, process.env, {
+ "XPCOM_DEBUG_BREAK": "stack",
+ "NS_TRACE_MALLOC_DISABLE_STACKS": "1"
+ });
+
// Use `process.std[out|err].write` to write to screen
// instead of console.log since console.logs are silenced during testing
- var task = exec(cmd, null, function(err, stdout, stderr) {
+ var task = exec(cmd, { env: env }, function(err, stdout, stderr) {
if (options.verbose) {
if (err) {
process.stdout.write('err: ' + err + '\n'); | Add additional env vars for the firefox process | mozilla-jetpack_jpm | train | js |
8189abc1f23b78e4cad3857848a1333fd77eca37 | diff --git a/activerecord/test/cases/serialized_attribute_test.rb b/activerecord/test/cases/serialized_attribute_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/serialized_attribute_test.rb
+++ b/activerecord/test/cases/serialized_attribute_test.rb
@@ -1,11 +1,9 @@
# frozen_string_literal: true
require "cases/helper"
-require "models/topic"
require "models/person"
require "models/traffic_light"
require "models/post"
-require "bcrypt"
class SerializedAttributeTest < ActiveRecord::TestCase
fixtures :topics, :posts | Remove unused requires
`require "bcrypt"` is unsed since #<I>.
`require "models/topic"` is unused since <I>c<I>da<I>b2c2d<I>b<I>d<I>b1ba8b0d0. | rails_rails | train | rb |
4924fadb7e291e7ded9a57b166f6ebbc45602b53 | diff --git a/src/masonry.plugin.js b/src/masonry.plugin.js
index <HASH>..<HASH> 100644
--- a/src/masonry.plugin.js
+++ b/src/masonry.plugin.js
@@ -87,7 +87,7 @@ VueMasonryPlugin.install = function (Vue, options) {
})
})
},
- beforeDestroy: function (el) {
+ unbind: function (el) {
Events.$emit(EVENT_REMOVE, {
'element': el
}) | change beforeDestroy to unbind | shershen08_vue-masonry | train | js
8f45eaa8559c04b1daa0b4af670ad08fbce70715 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,14 +8,21 @@ Copyright (c) 2017-2018 Yao-Yuan Mao (yymao)
http://opensource.org/licenses/MIT
"""
+import os
from setuptools import setup
+with open(os.path.join(os.path.dirname(__file__), 'GCR.py')) as f:
+ for l in f:
+ if l.startswith('__version__ = '):
+ exec(l) #pylint: disable=W0122
+ break
+
setup(
name='GCR',
- version='0.6.3',
+ version=__version__, #pylint: disable=E0602
description='Generic Catalog Reader: A common reader interface for accessing generic catalogs',
url='https://github.com/yymao/generic-catalog-reader',
- download_url='https://github.com/yymao/generic-catalog-reader/archive/v0.6.3.zip',
+ download_url='https://github.com/yymao/generic-catalog-reader/archive/v{}.zip'.format(__version__), #pylint: disable=E0602
author='Yao-Yuan Mao',
author_email='[email protected]',
maintainer='Yao-Yuan Mao', | parse version from src | yymao_generic-catalog-reader | train | py |
249ef37e9780300e3e2c6ddfa1afe52ad9125cd4 | diff --git a/src/Token.php b/src/Token.php
index <HASH>..<HASH> 100644
--- a/src/Token.php
+++ b/src/Token.php
@@ -24,7 +24,7 @@ class Token
/**
* Create Token instance.
- * @param strin $filename Unique file identifier.
+ * @param string $filename Unique file identifier.
* @param Array $params Manipulation parameters.
* @param string $signKey Signing key used to secure URLs.
*/ | Fix bug with Token class DocBlocks. | thephpleague_glide | train | php |
f247e7e12d143dcf3dfffb27b31638505bd7286a | diff --git a/resources/lang/pl-PL/dashboard.php b/resources/lang/pl-PL/dashboard.php
index <HASH>..<HASH> 100644
--- a/resources/lang/pl-PL/dashboard.php
+++ b/resources/lang/pl-PL/dashboard.php
@@ -35,6 +35,7 @@ return [
'failure' => 'Coś poszło nie tak z aktualizacją zdarzenia',
],
],
+ 'reported_by' => 'Reported by :user',
'add' => [
'title' => 'Vorfall hinzufügen',
'success' => 'Dodano zdarzenie.', | New translations dashboard.php (Polish) | CachetHQ_Cachet | train | php |
246b582ddf3c2ec4939be9d55018d97066d7fac8 | diff --git a/actionpack/lib/action_controller/base.rb b/actionpack/lib/action_controller/base.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_controller/base.rb
+++ b/actionpack/lib/action_controller/base.rb
@@ -1160,13 +1160,8 @@ module ActionController #:nodoc:
def reset_session #:doc:
request.reset_session
@_session = request.session
- #http://rails.lighthouseapp.com/projects/8994/tickets/1558-memory-problem-on-reset_session-in-around_filter#ticket-1558-1
- #MRI appears to have a GC related memory leak to do with the finalizer that is defined on CGI::Session
- ObjectSpace.undefine_finalizer(@_session)
- response.session = @_session
end
-
private
def render_for_file(template_path, status = nil, layout = nil, locals = {}) #:nodoc:
logger.info("Rendering #{template_path}" + (status ? " (#{status})" : '')) if logger | Remove CGI::Session memory leak patch | rails_rails | train | rb |
a58ca86a6e97066e0e670cf4d5fe7644511b42bf | diff --git a/samples/react/src/Components/Header.js b/samples/react/src/Components/Header.js
index <HASH>..<HASH> 100644
--- a/samples/react/src/Components/Header.js
+++ b/samples/react/src/Components/Header.js
@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { useEffect } from 'react';
import {
Link,
useHistory
@@ -12,6 +12,18 @@ const Header = (props) => {
let loginState = props.loginState;
let history = useHistory();
+ useEffect(() => {
+ /**
+ * This hook should placed in a component that able to detect all history changes
+ * In this app, header can detect all history changes because part of all pages.
+ * If your app doesn't have a component like header which rendering on all pages,
+ * you should call this hook on App.js
+ */
+ return history.listen(() => {
+ Countly.initializeFeedbackPopups();
+ });
+ }, [history])
+
const onSignIn = () => {
let login = window.confirm("Do you want to login ?");
if(login) { | [react-feedback-use-case] added callback initializer to history change hook.
This hook should be placed in a component that is able to detect all history changes. In this app, the header can detect all history changes because it is part of all pages.
If your app doesn't have a component like the header which renders on all pages, you should call this hook in App.js. | Countly_countly-sdk-web | train | js
fc4424c32b33993a7d3c2fc2aa2334b003040996 | diff --git a/tape/src/main/java/com/squareup/tape2/QueueFile.java b/tape/src/main/java/com/squareup/tape2/QueueFile.java
index <HASH>..<HASH> 100644
--- a/tape/src/main/java/com/squareup/tape2/QueueFile.java
+++ b/tape/src/main/java/com/squareup/tape2/QueueFile.java
@@ -132,7 +132,7 @@ public final class QueueFile implements Closeable, Iterable<byte[]> {
@Private boolean closed;
- private static RandomAccessFile initializeFromFile(File file, boolean forceLegacy)
+ @Private static RandomAccessFile initializeFromFile(File file, boolean forceLegacy)
throws IOException {
if (!file.exists()) {
// Use a temp file so we don't leave a partially-initialized file. | Remove synthetic accessor method.
initializeFromFile is used in Builder.build(). | square_tape | train | java |
80166a972ed6b5bf59e5df20d132e2690de4aea9 | diff --git a/lib/endpoint.js b/lib/endpoint.js
index <HASH>..<HASH> 100644
--- a/lib/endpoint.js
+++ b/lib/endpoint.js
@@ -11,13 +11,6 @@ class Endpoint {
* Default schema for API calls
*/
this.schema = {
- query: [
- {
- name: 'benchmark',
- default: false,
- description: 'Enables benchmark mode which benchmarks the time for every function in the endpoint.'
- }
- ],
scope: '',
method: 'GET',
description: 'There is no description for this endpoint yet.' | don't convolute endpoint schema with benchmark param | cubic-js_cubic | train | js |
8cdae3402b13ab28c7876dc0ade908dbbb20da73 | diff --git a/amino/state.py b/amino/state.py
index <HASH>..<HASH> 100644
--- a/amino/state.py
+++ b/amino/state.py
@@ -215,7 +215,11 @@ tcs(Id, State)
class IOState(Generic[S, A], StateT[IO, S, A], tpe=IO):
- pass
+
+ @staticmethod
+ def delay(f: Callable[..., A], *a: Any, **kw: Any) -> 'IOState[S, A]':
+ return IOState.lift(IO.delay(f, *a, **kw))
+
tcs(IO, IOState) # type: ignore | `IOState.delay` | tek_amino | train | py |
a3a22a4be287b5929a5f16602122d5655f2ffbf6 | diff --git a/test/generators/github/store/active_record_generator_test.rb b/test/generators/github/store/active_record_generator_test.rb
index <HASH>..<HASH> 100644
--- a/test/generators/github/store/active_record_generator_test.rb
+++ b/test/generators/github/store/active_record_generator_test.rb
@@ -4,6 +4,7 @@ require "rails/test_help"
require "active_record"
require "rails/generators/test_case"
require "generators/github/ds/active_record_generator"
+require "mysql2"
class GithubDSActiveRecordGeneratorTest < Rails::Generators::TestCase
tests Github::Ds::Generators::ActiveRecordGenerator | Add explicit require, last attempt for travis | github_github-ds | train | rb |
58a17816ab88820db4ccc3b9ac8979c9a903ce86 | diff --git a/lib/phobos/test/helper.rb b/lib/phobos/test/helper.rb
index <HASH>..<HASH> 100644
--- a/lib/phobos/test/helper.rb
+++ b/lib/phobos/test/helper.rb
@@ -1,13 +1,15 @@
module Phobos
module Test
module Helper
+ Topic = 'test-topic'
+ Group = 'test-group'
KafkaMessage = Struct.new(:value)
def process_message(handler:, payload:, metadata:, force_encoding: nil)
listener = Phobos::Listener.new(
handler: handler,
- group_id: 'test-group',
- topic: 'test-topic',
+ group_id: Group,
+ topic: Topic,
force_encoding: force_encoding
) | Move hard coded test values to constant | phobos_phobos | train | rb |
7f434586c9bb7b8cf3dd3f0abb9bb066cab4c61b | diff --git a/models/classes/routing/ActionEnforcer.php b/models/classes/routing/ActionEnforcer.php
index <HASH>..<HASH> 100644
--- a/models/classes/routing/ActionEnforcer.php
+++ b/models/classes/routing/ActionEnforcer.php
@@ -133,7 +133,8 @@ class ActionEnforcer implements IExecutable
// Action method is invoked, passing request parameters as
// method parameters.
- common_Logger::d('Invoking '.get_class($controller).'::'.$action, ARRAY('GENERIS', 'CLEARRFW'));
+ $user = common_session_SessionManager::getSession()->getUser();
+ common_Logger::d('Invoking '.get_class($controller).'::'.$action.' by '.$user->getIdentifier(), ARRAY('GENERIS', 'CLEARRFW'));
call_user_func_array(array($controller, $action), $tabParam);
// Render the view if selected. | Added userid to invoke log entries | oat-sa_tao-core | train | php |
ffac137ec807becfafa2f247db346827b444d3f4 | diff --git a/tasks/jsdoc-plugin.js b/tasks/jsdoc-plugin.js
index <HASH>..<HASH> 100644
--- a/tasks/jsdoc-plugin.js
+++ b/tasks/jsdoc-plugin.js
@@ -46,7 +46,7 @@ module.exports = function jsDocTask(grunt) {
* @return {String} command the command ready to be executed
*/
var buildCmd = function(bin, sources, destination){
- var cmd = bin + ' -d ' + destination +' ' + sources.join(' ');
+ var cmd = '"' + bin + '"' + ' -d ' + destination +' ' + sources.join(' ');
grunt.log.debug(cmd);
return cmd;
}; | Fix for an error in windows.
This fixes the following error in windows:
$ grunt jsdoc
Running "jsdoc:dist" (jsdoc) task
>> jsdoc error: Error: Command failed: 'node_modules' is not recognized as an internal or external command,
>> operable program or batch file.
Warning: jsdoc failure Use --force to continue.
Aborted due to warnings. | krampstudio_grunt-jsdoc | train | js |
39691ba2f5664aa83720fa3c2a1ca14937d29009 | diff --git a/activesupport/test/core_ext/object/blank_test.rb b/activesupport/test/core_ext/object/blank_test.rb
index <HASH>..<HASH> 100644
--- a/activesupport/test/core_ext/object/blank_test.rb
+++ b/activesupport/test/core_ext/object/blank_test.rb
@@ -35,7 +35,7 @@ class BlankTest < ActiveSupport::TestCase
end
def test_presence_with_a_block
- assert_equal "SALLY", "sally".presence { upcase } || "Nobody"
+ assert_equal "THIS WAS TENDERLOVE'S IDEA", "this was tenderlove's idea".presence { upcase } || "Nobody"
assert_equal "Nobody", nil.presence { upcase } || "Nobody"
end
end | Clarify the origin of this great addition to Rails :trollface: :trollface :trollface: | rails_rails | train | rb |
3348f6638f0455d38467fb387ba26d191cb24bd8 | diff --git a/lib/init.js b/lib/init.js
index <HASH>..<HASH> 100644
--- a/lib/init.js
+++ b/lib/init.js
@@ -3,11 +3,11 @@
/*
* 全体の流れ
*/
-module.exports = (blueprint, destination, cwd, savedir = null) => {
+module.exports = (listFile, dest, cwd, savedir = null) => {
return (makeplan, confirm, save, done) => {
- makeplan(blueprint, cwd, (plan) => {
+ makeplan(listFile, cwd, (plan) => {
if (confirm(plan)) {
- return save(plan, destination, cwd, savedir, done);
+ return save(plan, dest, cwd, savedir, done);
}
});
}; | Changed variable names
- blueprint → listFile
- destination → dest | kesuiket_node-pickfile | train | js |
f86ad624e7c8a5c345eb325120b1f9a299aef7b3 | diff --git a/container.go b/container.go
index <HASH>..<HASH> 100644
--- a/container.go
+++ b/container.go
@@ -237,12 +237,13 @@ type ContainerInfoEntry struct {
}
type Metrics struct {
- MemoryStat ContainerMemoryStat
- CPUStat ContainerCPUStat
- DiskStat ContainerDiskStat
- NetworkStat ContainerNetworkStat
- PidStat ContainerPidStat
- Age time.Duration
+ MemoryStat ContainerMemoryStat
+ CPUStat ContainerCPUStat
+ DiskStat ContainerDiskStat
+ NetworkStat ContainerNetworkStat
+ PidStat ContainerPidStat
+ Age time.Duration
+ CPUEntitlement uint64
}
type ContainerMetricsEntry struct { | Add CPUEntitlement to Metrics struct
[#<I>] | cloudfoundry_garden | train | go |
488fad2c5c61b8f31c91687f8c19dacb6818b076 | diff --git a/python/mxnet/symbol/symbol.py b/python/mxnet/symbol/symbol.py
index <HASH>..<HASH> 100644
--- a/python/mxnet/symbol/symbol.py
+++ b/python/mxnet/symbol/symbol.py
@@ -90,7 +90,7 @@ class Symbol(SymbolBase):
<Symbol d>
<Symbol _plus0>
"""
- return (self[i] for i in self.list_outputs())
+ return (self[i] for i in range(len(self)))
def __add__(self, other):
"""x.__add__(y) <=> x+y
diff --git a/tests/python/unittest/test_symbol.py b/tests/python/unittest/test_symbol.py
index <HASH>..<HASH> 100644
--- a/tests/python/unittest/test_symbol.py
+++ b/tests/python/unittest/test_symbol.py
@@ -367,6 +367,11 @@ def test_simple_bind_gradient_graph_possible_with_cycle():
res = data + data + data + data + data + data + data + data
res.simple_bind(ctx=mx.cpu(), data=(1,))
+def test_children_same_name():
+ a = mx.sym.Variable('data')
+ b = a + a
+ for c in b.get_children():
+ pass
if __name__ == '__main__':
import nose | Fix iterator over symbol when multiple children have the same name (#<I>) | apache_incubator-mxnet | train | py,py |
39fd47839d71bd2fc4d8ca6a556dcbc33b853ba1 | diff --git a/spyderlib/config.py b/spyderlib/config.py
index <HASH>..<HASH> 100644
--- a/spyderlib/config.py
+++ b/spyderlib/config.py
@@ -157,7 +157,7 @@ DEFAULTS = [
'truncate': True,
'minmax': False,
'collvalue': False,
- 'remote_editing': True,
+ 'remote_editing': False,
}),
('editor',
{ | Variable explorer: "remote-process editing" option is now turned off by default, i.e. variables are edited in Spyder's process to avoid any PyQt API compatibility issue when running scripts using a different PyQt API than Spyder's
For the record, the remote editing feature is quite useful when editing large arrays (for example) without having to send a copy of these variables from the remote process to Spyder's through the communication process | spyder-ide_spyder | train | py |
bc1ad72a0e8749e70752d40949f6e2a0e3379b82 | diff --git a/server/examples/streaming/direct_kafka_word_count.js b/server/examples/streaming/direct_kafka_word_count.js
index <HASH>..<HASH> 100644
--- a/server/examples/streaming/direct_kafka_word_count.js
+++ b/server/examples/streaming/direct_kafka_word_count.js
@@ -23,6 +23,10 @@
* Example:
* $ bin/eclairjs.sh examples/streaming/direct_kafka_word_count.js broker1-host:port,broker2-host:port topic1,topic2
*/
+
+print("TODO: Kafka Direct Streaming Not Implemented!");
+java.lang.System.exit(0);
+
var KafkaUtils = require('eclairjs/streaming/kafka/KafkaUtils');
var Duration = require('eclairjs/streaming/Duration');
var StreamingContext = require('eclairjs/streaming/StreamingContext'); | kafka direct streaming example not implemented warning | EclairJS_eclairjs | train | js |
17c84299662f392552c0793d2f72ce0417452b0d | diff --git a/src/initialize-handler.js b/src/initialize-handler.js
index <HASH>..<HASH> 100644
--- a/src/initialize-handler.js
+++ b/src/initialize-handler.js
@@ -1,7 +1,7 @@
import {offset} from './render-size';
export default function(view, prevHandler, el, constructor) {
- var handler = new constructor()
+ var handler = new constructor(view.loader())
.scene(view.scenegraph().root)
.initialize(el, offset(view), view); | Pass view loader to scenegraph handler. (vega/vega#<I>) | vega_vega-view | train | js |
e7f9ae50e57e4de7d9f95f4842237fbcd1dcdac1 | diff --git a/lib/classes/session/manager.php b/lib/classes/session/manager.php
index <HASH>..<HASH> 100644
--- a/lib/classes/session/manager.php
+++ b/lib/classes/session/manager.php
@@ -933,6 +933,16 @@ class manager {
if ($generateevent) {
$event->trigger();
}
+
+ // Queue migrating the messaging data, if we need to.
+ if (!get_user_preferences('core_message_migrate_data', false, $userid)) {
+ // Check if there are any legacy messages to migrate.
+ if (\core_message\helper::legacy_messages_exist($userid)) {
+ \core_message\task\migrate_message_data::queue_task($userid);
+ } else {
+ set_user_preference('core_message_migrate_data', true, $userid);
+ }
+ }
}
/** | MDL-<I> messaging: Loginas upgrade check
When using login as, we also need to check if there are legacy messages that
need updating and set the user preference if required. | moodle_moodle | train | php |
51c76ff0e8d74fd6a339535974ed0edd811ba38c | diff --git a/tests/ValidatingTraitTest.php b/tests/ValidatingTraitTest.php
index <HASH>..<HASH> 100644
--- a/tests/ValidatingTraitTest.php
+++ b/tests/ValidatingTraitTest.php
@@ -70,7 +70,7 @@ class ValidatingTraitTest extends PHPUnit_Framework_TestCase
public function testGetValidationAttributeNames()
{
- $this->assertNull($this->trait->getValidationAttributeNames());
+ $this->assertEmpty($this->trait->getValidationAttributeNames());
}
public function testSetValidationAttributeNames() | Update unit test to check for empty array | dwightwatson_validating | train | php |
3208959b41b01bc3a633c7ea0c42314b1249789a | diff --git a/test/test_replacer.py b/test/test_replacer.py
index <HASH>..<HASH> 100644
--- a/test/test_replacer.py
+++ b/test/test_replacer.py
@@ -24,6 +24,12 @@ def assert_not_replaced(filename):
assert "old" in path.Path(filename).text()
+def ensure_matching_file(src):
+ src = path.Path(src)
+ src.parent.makedirs_p()
+ src.write_text("this is old")
+
+
def test_help(capsys):
with pytest.raises(SystemExit) as e:
replacer.main(["--help"])
@@ -69,5 +75,11 @@ def test_exclude_extension(test_path):
def test_exclude_directory(test_path):
- replacer.main(["old", "new", "--go", "--exclude", "a_dir/*"])
- assert_not_replaced("a_dir/sub/foo.txt")
+ one = "node_modules/one.js"
+ two = "packages/foo/node_modules/two.js"
+ for f in one, two:
+ ensure_matching_file(f)
+
+ replacer.main(["old", "new", "--go", "--exclude", "node_modules/*"])
+ assert_not_replaced(one)
+ assert_not_replaced(two) | test for --exclude: more realistic example | dmerejkowsky_replacer | train | py |
89eac44c58810df3f1b6ddfea54ae8db5a09f18e | diff --git a/src/Moip.php b/src/Moip.php
index <HASH>..<HASH> 100644
--- a/src/Moip.php
+++ b/src/Moip.php
@@ -176,7 +176,7 @@ class Moip
*/
public function transfers()
{
- return new transfers($this);
+ return new Transfers($this);
}
/** | class reference is not case sensitive, but common sense prevails | wirecardBrasil_moip-sdk-php | train | php |
74f4e905b0d72aa07acb9b6e20229aac188de33c | diff --git a/src/org/jgroups/JChannel.java b/src/org/jgroups/JChannel.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/JChannel.java
+++ b/src/org/jgroups/JChannel.java
@@ -75,7 +75,7 @@ import java.util.concurrent.Exchanger;
* the construction of the stack will be aborted.
*
* @author Bela Ban
- * @version $Id: JChannel.java,v 1.207 2008/10/30 09:34:57 vlada Exp $
+ * @version $Id: JChannel.java,v 1.208 2008/10/30 10:02:52 belaban Exp $
*/
@MBean(description="JGroups channel")
public class JChannel extends Channel {
@@ -1563,8 +1563,10 @@ public class JChannel extends Channel {
if(log.isInfoEnabled())
log.info("JGroups version: " + Version.description);
- ConfiguratorFactory.substituteVariables(configurator); // replace vars with system props
+ // ConfiguratorFactory.substituteVariables(configurator); // replace vars with system props
String tmp=configurator.getProtocolStackString();
+ tmp=Util.substituteVariable(tmp); // replace vars with system props
+
prot_stack=new ProtocolStack(this, tmp);
try {
prot_stack.setup(); // Setup protocol stack (creates protocol, calls init() on them) | cancelled Vladimir's change: with my modification, PlainConfigurator works, too | belaban_JGroups | train | java |
2e18c50080701ad3e91f99caca3109d97f0b63b2 | diff --git a/scripts/tdd.js b/scripts/tdd.js
index <HASH>..<HASH> 100644
--- a/scripts/tdd.js
+++ b/scripts/tdd.js
@@ -7,7 +7,10 @@ const snippetFiles = fs.readdirSync(SNIPPETS_PATH, 'utf8').map(fileName => fileN
fs.removeSync(TEST_PATH);
+const errSnippets = ['JSONToFile', 'readFileLines', 'UUIDGeneratorNode'];
+
snippetFiles
+ .filter(fileName => !errSnippets.includes(fileName))
.map(fileName => {
fs.ensureDirSync(`${TEST_PATH}/${fileName}`);
return fileName; | filter out errSnippets | 30-seconds_30-seconds-of-code | train | js |
96eca1bdd7fe56c201ea83030154dd8bea6d3e3c | diff --git a/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task.js b/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task.js
index <HASH>..<HASH> 100644
--- a/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task.js
+++ b/webapps/ui/tasklist/client/scripts/task/directives/cam-tasklist-task.js
@@ -289,7 +289,7 @@ module.exports = [ function() {
if (!$scope.task || !$scope.taskExists) return;
taskResource.get($scope.task.id, function(err) {
- if (err) {
+ if (err && err.status === 404) {
$scope.taskExists = false;
$scope.$broadcast('taskremoved');
} | fix(task): keep task open when connection is disrupted
related to CAM-<I> | camunda_camunda-bpm-platform | train | js |
75a526c92f58ab7edbcda15bcfc344ffa4a2c01d | diff --git a/libs/verysimple/HTTP/RequestUtil.php b/libs/verysimple/HTTP/RequestUtil.php
index <HASH>..<HASH> 100644
--- a/libs/verysimple/HTTP/RequestUtil.php
+++ b/libs/verysimple/HTTP/RequestUtil.php
@@ -426,6 +426,9 @@ class RequestUtil
{
$_REQUEST = array();
$_FILES = array();
+
+ self::$bodyCache = "";
+ self::$bodyCacheIsReady = false;
}
/** | updated ClearAll to reset body contents as well | jasonhinkle_phreeze | train | php |
7d8e01b433b1a63bd0a12d7b8ac37af6fda5733d | diff --git a/h2o-core/src/main/java/water/init/NodePersistentStorage.java b/h2o-core/src/main/java/water/init/NodePersistentStorage.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/water/init/NodePersistentStorage.java
+++ b/h2o-core/src/main/java/water/init/NodePersistentStorage.java
@@ -56,7 +56,7 @@ public class NodePersistentStorage {
throw new IllegalArgumentException("NodePersistentStorage name not specified");
}
- if (! Pattern.matches("[\\-a-zA-Z0-9]+", keyName)) {
+ if (! Pattern.matches("[\\-a-zA-Z0-9_ \\(\\)]+", keyName)) {
throw new IllegalArgumentException("NodePersistentStorage illegal name");
}
} | Relax filename regex for node persistent storage | h2oai_h2o-3 | train | java |
b6a777a0063a4805513081be054523a40afd1295 | diff --git a/core_gem/lib/deep_cover/analyser/per_line.rb b/core_gem/lib/deep_cover/analyser/per_line.rb
index <HASH>..<HASH> 100644
--- a/core_gem/lib/deep_cover/analyser/per_line.rb
+++ b/core_gem/lib/deep_cover/analyser/per_line.rb
@@ -35,7 +35,7 @@ module DeepCover
end
def missed_empty_branch?(node)
- node.is_a?(Node::Branch) && node.branches.any? { |b| b.is_a?(Node::EmptyBody) && !Tools.covered?(node_runs(b)) }
+ node.is_a?(Node::Branch) && node.branches.any? { |b| b.is_a?(Node::EmptyBody) && node_runs(b) == 0 }
end
end
end | Fix per_line analyser to take into account ignore flags [#<I>] | deep-cover_deep-cover | train | rb |