hash
stringlengths 40
40
| diff
stringlengths 131
26.7k
| message
stringlengths 7
694
| project
stringlengths 5
67
| split
stringclasses 1
value | diff_languages
stringlengths 2
24
|
---|---|---|---|---|---|
bab98cd969082eccde956b7d01aa994bc97edaf6 | diff --git a/actions/buildrelease.py b/actions/buildrelease.py
index <HASH>..<HASH> 100644
--- a/actions/buildrelease.py
+++ b/actions/buildrelease.py
@@ -71,7 +71,7 @@ class BuildReleaseAction:
else:
if repo.is_dirty():
repo.git.add(u=True)
- repo.git.commit(m='Release {}'.format(version))
+ repo.git.commit(m='[heliumcli] Release {}'.format(version))
repo.remotes["origin"].push("master")
tag = repo.create_tag(version, m="")
repo.remotes["origin"].push(tag) | Adding [heliumcli] to automated commit messages. | HeliumEdu_heliumcli | train | py |
34ea8102c7678995401d66ac67811e4c3d417d80 | diff --git a/src/sources/raster.js b/src/sources/raster.js
index <HASH>..<HASH> 100644
--- a/src/sources/raster.js
+++ b/src/sources/raster.js
@@ -47,9 +47,14 @@ export class RasterTileSource extends NetworkTileSource {
// Return texture info for a raster tile
tileTexture (tile) {
- let key = tile.coords.key;
+ let new_coords = tile.coords;
+ if (this.overzoom !== 0) {
+ // account for any overzoom parameter on the raster datasource
+ new_coords = Tile.coordinateAtZoom({x: tile.coords.x, y: tile.coords.y, z: tile.coords.z}, Math.max(tile.coords.z - this.overzoom, 0));
+ }
+ let key = new_coords.key;
if (!this.textures[key]) {
- let coords = Tile.coordinateWithMaxZoom(tile.coords, this.max_zoom);
+ let coords = Tile.coordinateWithMaxZoom(new_coords, this.max_zoom);
let url = this.formatUrl(this.url, { coords });
this.textures[key] = { url, filtering: this.filtering, coords };
} | account for rasters attached to vector sources | tangrams_tangram | train | js |
6ee6e412341b07a9d7f5b42c28e2f8d28a8432d9 | diff --git a/spacy/language.py b/spacy/language.py
index <HASH>..<HASH> 100644
--- a/spacy/language.py
+++ b/spacy/language.py
@@ -1279,7 +1279,7 @@ class Language:
"""Evaluate a model's pipeline components.
examples (Iterable[Example]): `Example` objects.
- batch_size (int): Batch size to use.
+ batch_size (Optional[int]): Batch size to use.
scorer (Optional[Scorer]): Scorer to use. If not passed in, a new one
will be created.
component_cfg (dict): An optional dictionary with extra keyword | Update docstring for Language.evaluate | explosion_spaCy | train | py |
cf751c4b55b8795963f3f2d21771ce3291cc3287 | diff --git a/spec/mysql2/result_spec.rb b/spec/mysql2/result_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mysql2/result_spec.rb
+++ b/spec/mysql2/result_spec.rb
@@ -139,9 +139,10 @@ describe Mysql2::Result do
it "should raise an exception if streaming ended due to a timeout" do
# Create an extra client instance, since we're going to time it out
client = Mysql2::Client.new DatabaseCredentials['root']
- client.query "CREATE TEMPORARY TABLE streamingTest (val VARCHAR(10))"
+ client.query "CREATE TEMPORARY TABLE streamingTest (val BINARY(255))"
# Insert enough records to force the result set into multiple reads
+ # (the BINARY type is used simply because it forces full width results)
10000.times do |i|
client.query "INSERT INTO streamingTest (val) VALUES ('Foo #{i}')"
end | Need more rows to max out the packet size | brianmario_mysql2 | train | rb |
c251d4aa977da604735827a49d69f91e908f7dff | diff --git a/src/main/java/com/teklabs/gwt/i18n/client/LocaleFactory.java b/src/main/java/com/teklabs/gwt/i18n/client/LocaleFactory.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/teklabs/gwt/i18n/client/LocaleFactory.java
+++ b/src/main/java/com/teklabs/gwt/i18n/client/LocaleFactory.java
@@ -1,6 +1,7 @@
package com.teklabs.gwt.i18n.client;
import com.google.gwt.i18n.client.LocalizableResource;
+import com.teklabs.gwt.i18n.server.LocaleProxy;
import java.util.HashMap;
@@ -26,13 +27,14 @@ public class LocaleFactory {
if (m != null) {
return m;
}
- if (factory != null) {
- m = factory.create(cls);
- put(cls, m);
- return m;
- } else {
- throw new RuntimeException("Messages not found: " + cls);
+
+ if (factory == null) {
+ //uses default factory if dev do not pick one
+ LocaleProxy.initialize();
}
+ m = factory.create(cls);
+ put(cls, m);
+ return m;
}
} | Changed get() to auto-initilize the factory in case there is no factory available. | lightoze_gwt-i18n-server | train | java |
ff0a72484b2560682aced148b04aebb12d6d369d | diff --git a/tests/unittest_nodes.py b/tests/unittest_nodes.py
index <HASH>..<HASH> 100644
--- a/tests/unittest_nodes.py
+++ b/tests/unittest_nodes.py
@@ -148,21 +148,20 @@ def function(var):
ast = abuilder.string_build(code)
self.assertEqual(ast.as_string(), code)
- @test_utils.require_version("3.0")
- @unittest.expectedFailure
def test_3k_annotations_and_metaclass(self):
- code_annotations = textwrap.dedent(
- '''
- def function(var:int):
+ code = '''
+ def function(var: int):
nonlocal counter
class Language(metaclass=Natural):
"""natural language"""
'''
- )
+ code_annotations = textwrap.dedent(code)
+ # pylint: disable=line-too-long
+ expected = 'def function(var: int):\n nonlocal counter\n\n\nclass Language(metaclass=Natural):\n """natural language"""'
ast = abuilder.string_build(code_annotations)
- self.assertEqual(ast.as_string(), code_annotations)
+ self.assertEqual(ast.as_string().strip(), expected)
def test_ellipsis(self):
ast = abuilder.string_build("a[...]").body[0] | Change test that expected as_string() to return a particular number of newlines | PyCQA_astroid | train | py |
f9f1600f7c34919070a8a26d493b28f8a5c6dab3 | diff --git a/generators/generator-transforms.js b/generators/generator-transforms.js
index <HASH>..<HASH> 100644
--- a/generators/generator-transforms.js
+++ b/generators/generator-transforms.js
@@ -31,9 +31,9 @@ const prettierOptions = {
const prettierTransform = function(defaultOptions) {
const transform = (file, encoding, callback) => {
- if(file.state !== 'deleted') {
- /* resolve from the projects config */
- prettier.resolveConfig(file.relative).then(options => {
+ /* resolve from the projects config */
+ prettier.resolveConfig(file.relative).then(options => {
+ if(file.state !== 'deleted') {
const str = file.contents.toString('utf8');
if (!options || Object.keys(options).length === 0) {
options = defaultOptions;
@@ -42,9 +42,9 @@ const prettierTransform = function(defaultOptions) {
options.filepath = file.relative;
const data = prettier.format(str, options);
file.contents = Buffer.from(data);
- callback(null, file);
- });
- }
+ }
+ callback(null, file);
+ });
};
return through.obj(transform);
Update condition when applying prettierTransform
We need to always call callback(null, file) otherwise the program
is stopped prematurely
#<I> | jhipster_generator-jhipster | train | js |
b2241ad47bdaebc94759a6601ff1a29c91ca2478 | diff --git a/core/src/main/java/org/infinispan/newstatetransfer/StateResponseCommand.java b/core/src/main/java/org/infinispan/newstatetransfer/StateResponseCommand.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/infinispan/newstatetransfer/StateResponseCommand.java
+++ b/core/src/main/java/org/infinispan/newstatetransfer/StateResponseCommand.java
@@ -42,7 +42,7 @@ public class StateResponseCommand extends BaseRpcCommand {
private static final Log log = LogFactory.getLog(StateResponseCommand.class);
- public static final byte COMMAND_ID = 17;
+ public static final byte COMMAND_ID = 20;
private int topologyId; | ISPN-<I> Assign another id to StateResponseCommand to fix duplicated id. | infinispan_infinispan | train | java |
eddecbd7aa4df3ebf945dda45755d89af11f951d | diff --git a/insights/client/__init__.py b/insights/client/__init__.py
index <HASH>..<HASH> 100644
--- a/insights/client/__init__.py
+++ b/insights/client/__init__.py
@@ -204,6 +204,14 @@ class InsightsClient(object):
'stdout': stdout,
'rc': return code}
"""
+ # if we are running in no_gpg or not gpg mode then return true
+ if not config["gpg"]:
+ return {'gpg': True,
+ 'stderr': None,
+ 'stdout': None,
+ 'rc': 0}
+
+ # if a valid egg path and gpg were received do the verification
if egg_path and gpg_key:
cmd_template = '/usr/bin/gpg --verify --keyring %s %s %s'
cmd = cmd_template % (gpg_key, egg_path + '.asc', egg_path) | want to bypass verification in no_gpg (#<I>) | RedHatInsights_insights-core | train | py |
291edfdd859bccc95f41b7305f5bb8ff6444ad75 | diff --git a/tests/test_client.py b/tests/test_client.py
index <HASH>..<HASH> 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -36,6 +36,8 @@ import tests
import pytest
+import httplib
+
class MyException(Exception):
"""Local exception class used in this testing module."""
@@ -126,7 +128,7 @@ class MockTransport(suds.transport.Transport):
send_data = [send_data]
self.mock_operation_log = []
self.mock_open_data = open_data
- self.mock_send_config = send_data
+ self.mock_send_data = send_data
super(MockTransport, self).__init__()
def open(self, request):
@@ -138,7 +140,10 @@ class MockTransport(suds.transport.Transport):
def send(self, request):
self.mock_operation_log.append(("send", request.url))
if self.mock_send_data:
- return suds.BytesIO(self.mock_send_data.pop(0))
+ status = httplib.OK
+ headers = {}
+ data = self.mock_send_data.pop(0)
+ return suds.transport.Reply(status, headers, data)
pytest.fail("Unexpected MockTransport.send() operation call.") | fix MockTransport.send() operation in tests/test_client.py
MockTransport.send() operation had some bugs in it:
- misnamed mock input data member in MockTransport.__init__()
- return value was constructed as if for MockTransport.open()
The operation is not used anywhere yet, but has not been removed since it will
soon be used in a new test currently being developed (that is how the
bugs got detected in the first place). | ovnicraft_suds2 | train | py |
746b4d2da27c01a8724eb38e97f9ada21ce9bfe7 | diff --git a/lib/xcode/group.rb b/lib/xcode/group.rb
index <HASH>..<HASH> 100644
--- a/lib/xcode/group.rb
+++ b/lib/xcode/group.rb
@@ -75,23 +75,14 @@ module Xcode
end
#
- # @param [String] name of the group that you want to find or create.
+ # Find all the files that have have a name that matches the specified name.
#
- def find_or_create_group(name)
- found_group = group(name)
- found_group = create_group(name) if found_group.empty?
- Array(found_group).first
- end
-
+ # @param [String] name of the file that you are looking to return.
+ # @return [Array<FileReference>] the files with the same mathching
+ # name. This could be no files, one file, or multiple files.
#
- # Return a single reference that matches the name specified.
- #
- # @param [String] name of the file that want to return.
- # @return [Group,FileReference] the object that has the name matching the
- # the one specified.
- #
def file(name)
- group(name).first
+ files.find_all {|file| file.name == name or file.path == name }
end
#
diff --git a/spec/group_spec.rb b/spec/group_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/group_spec.rb
+++ b/spec/group_spec.rb
@@ -54,10 +54,8 @@ describe Xcode::Group do
end
describe "#files" do
-
it "should return the correct number of files within the group" do
subject.group('TestProject').first.files.count.should == 2
end
-
end
end
\ No newline at end of file | Group - removed the find_or_create_group and #file(name) will use correct method | rayh_xcoder | train | rb,rb |
9b62e333d769acfb634b251187648309f3597269 | diff --git a/num2words/lang_TR.py b/num2words/lang_TR.py
index <HASH>..<HASH> 100644
--- a/num2words/lang_TR.py
+++ b/num2words/lang_TR.py
@@ -806,6 +806,10 @@ class Num2Word_TR(Num2Word_Base):
return wrd
+ def to_ordinal_num(self, value):
+ self.verify_ordinal(value)
+ return "%s%s" % (value, self.to_ordinal(value)[-4:])
+
def to_splitnum(self, val):
float_digits = str(int(val * 10 ** self.precision))
if not int(val) == 0: | [ADD] to ordinal number for Turkish | savoirfairelinux_num2words | train | py |
70b916dae7b7d8558ffb41ad73d1d295260b9963 | diff --git a/wxmplot/plotpanel.py b/wxmplot/plotpanel.py
index <HASH>..<HASH> 100644
--- a/wxmplot/plotpanel.py
+++ b/wxmplot/plotpanel.py
@@ -110,7 +110,6 @@ class PlotPanel(BasePanel):
self.set_y2label(y2label, delay_draw=True)
if title is not None:
self.set_title(title, delay_draw=True)
-
self.use_datas = ifnotNone(use_dates, self.use_dates)
return self.oplot(xdata, ydata, side=side, **kws)
@@ -374,7 +373,7 @@ class PlotPanel(BasePanel):
conf.viewpad = viewpad
axes = self.axes
- self.conf.user_limits[axes] = (xmin, xmax, ymin, ymax)
+ self.conf.user_limits[axes] = [xmin, xmax, ymin, ymax]
self.conf.axes_traces = {axes: [0]}
self.conf.set_trace_label('scatterplot')
@@ -450,7 +449,7 @@ class PlotPanel(BasePanel):
axes = self.axes
if side == 'right':
axes = self.get_right_axes()
- self.conf.user_limits[axes] = limits
+ self.conf.user_limits[axes] = list(limits)
self.unzoom_all()
def set_viewlimits(self): | user limit cannot be tuple | newville_wxmplot | train | py |
a4fb5b93ba7f93ec01fd8e141c294f54842bd818 | diff --git a/lib/Timeline.php b/lib/Timeline.php
index <HASH>..<HASH> 100644
--- a/lib/Timeline.php
+++ b/lib/Timeline.php
@@ -253,4 +253,12 @@ class Timeline implements TimelineInterface
return $modified;
}
+
+ /**
+ * @return Collection
+ */
+ public function getVersions()
+ {
+ return $this->versions;
+ }
} | Exposed versions in the timeline through a getter. | baleen_migrations | train | php |
262e5377bf1ae013b50830da9c35c6bc568dba35 | diff --git a/suitable/module_runner.py b/suitable/module_runner.py
index <HASH>..<HASH> 100644
--- a/suitable/module_runner.py
+++ b/suitable/module_runner.py
@@ -331,5 +331,9 @@ class ModuleRunner(object):
'contacted': {
server: answer['result']
for server, answer in callback.contacted.items()
+ },
+ 'unreachable': {
+ server: result
+ for server, result in callback.unreachable.items()
}
}) | Add unreachable hosts to runner results | seantis_suitable | train | py |
e95cf300ba7e4ff0e01b0580e8832beaab757e40 | diff --git a/common/common-io/src/main/java/com/twelvemonkeys/io/enc/EncoderStream.java b/common/common-io/src/main/java/com/twelvemonkeys/io/enc/EncoderStream.java
index <HASH>..<HASH> 100644
--- a/common/common-io/src/main/java/com/twelvemonkeys/io/enc/EncoderStream.java
+++ b/common/common-io/src/main/java/com/twelvemonkeys/io/enc/EncoderStream.java
@@ -116,9 +116,7 @@ public final class EncoderStream extends FilterOutputStream {
}
else {
// Encode data already in the buffer
- if (bufferPos != 0) {
- encodeBuffer();
- }
+ encodeBuffer();
// Encode rest without buffering
encoder.encode(out, pBytes, pOffset, pLength); | Removed unnecessary (duplicate) if-statement. | haraldk_TwelveMonkeys | train | java |
e84881632c29ad170bae8a4e302bb07a1a21934d | diff --git a/src/scs_core/gas/pid/pid_calib.py b/src/scs_core/gas/pid/pid_calib.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/gas/pid/pid_calib.py
+++ b/src/scs_core/gas/pid/pid_calib.py
@@ -84,7 +84,7 @@ class PIDCalib(SensorCalib):
@property
- def pid_sens_v_ppp(self):
+ def pid_sens_v_ppb(self):
return self.__pid_sens_mv_ppm / 1000000.0
diff --git a/src/scs_core/gas/pid/pid_datum.py b/src/scs_core/gas/pid/pid_datum.py
index <HASH>..<HASH> 100644
--- a/src/scs_core/gas/pid/pid_datum.py
+++ b/src/scs_core/gas/pid/pid_datum.py
@@ -82,7 +82,7 @@ class PIDDatum(JSONable):
offset_v = calib.pid_elc_mv / 1000.0
response_c = we_c - offset_v # remove electronic zero
- cnc = response_c / calib.pid_sens_v_ppp
+ cnc = response_c / calib.pid_sens_v_ppb # units are Volts / ppb
return cnc | Standardised naming for PID sensitivity | south-coast-science_scs_core | train | py,py |
9ae1af66e131b11abc3641e5aa473de2be493275 | diff --git a/ospd/command/command.py b/ospd/command/command.py
index <HASH>..<HASH> 100644
--- a/ospd/command/command.py
+++ b/ospd/command/command.py
@@ -19,7 +19,7 @@
import re
import subprocess
-from typing import Optional, Dict, Any, Union, Generator
+from typing import Optional, Dict, Any, Union, Iterator
from xml.etree.ElementTree import Element, SubElement
@@ -67,9 +67,7 @@ class BaseCommand(metaclass=InitSubclassMeta):
def get_elements(self) -> Optional[Dict[str, Any]]:
return self.elements
- def handle_xml(
- self, xml: Element
- ) -> Union[bytes, Generator[bytes, None, None]]:
+ def handle_xml(self, xml: Element) -> Union[bytes, Iterator[bytes]]:
raise NotImplementedError()
def as_dict(self):
@@ -291,7 +289,7 @@ class GetVts(BaseCommand):
'filter': 'Optional filter to get an specific vt collection.',
}
- def handle_xml(self, xml: Element) -> Generator[bytes, None, None]:
+ def handle_xml(self, xml: Element) -> Iterator[bytes]:
""" Handles <get_vts> command.
Writes the vt collection on the stream.
The <get_vts> element accept two optional arguments. | Change return types from Generator to Iterator
Actually Generator[bytes, None, None] is the same as Iterator[bytes]. | greenbone_ospd | train | py
5ebbe1c3eec2bd3f37604627f3d4f53f5c899f48 | diff --git a/lib/moodlelib.php b/lib/moodlelib.php
index <HASH>..<HASH> 100644
--- a/lib/moodlelib.php
+++ b/lib/moodlelib.php
@@ -2692,6 +2692,10 @@ function require_login($courseorid = null, $autologinguest = true, $cm = null, $
$authplugin = get_auth_plugin($authname);
$authplugin->pre_loginpage_hook();
if (isloggedin()) {
+ if ($cm) {
+ $modinfo = get_fast_modinfo($course);
+ $cm = $modinfo->get_cm($cm->id);
+ }
break;
}
} | MDL-<I> auth: Refresh $cm in require_login after pre_loginpage_hook. | moodle_moodle | train | php |
a6f729ce6a4f7c4139b53a9c206b99c5c9c573f4 | diff --git a/taxtastic/taxonomy.py b/taxtastic/taxonomy.py
index <HASH>..<HASH> 100644
--- a/taxtastic/taxonomy.py
+++ b/taxtastic/taxonomy.py
@@ -92,7 +92,6 @@ class Taxonomy(object):
self.ranks.insert(self.ranks.index(parent_rank) + 1, rank)
self.rankset = set(self.ranks)
-
def _node(self, tax_id):
"""
Returns parent_id, rank
@@ -235,10 +234,10 @@ class Taxonomy(object):
def is_below(self, lower, upper):
return lower in self.ranks_below(upper)
- def ranks_below(self, rank):
+ def ranks_below(self, rank, depth=None):
below = []
try:
- below = self.ranks[self.ranks.index(rank):]
+ below = self.ranks[self.ranks.index(rank):depth]
except ValueError as err:
log.error(err)
return below | added a depth option to a subset of below ranks | fhcrc_taxtastic | train | py |
80e3aa1e151dd7f5878e1722f889f1f62955ea06 | diff --git a/core/src/utilities/helpers/iframe-helpers.js b/core/src/utilities/helpers/iframe-helpers.js
index <HASH>..<HASH> 100644
--- a/core/src/utilities/helpers/iframe-helpers.js
+++ b/core/src/utilities/helpers/iframe-helpers.js
@@ -240,7 +240,7 @@ class IframeHelpersClass {
'allow-modals', // Lets the resource open modal windows.
// 'allow-orientation-lock', // Lets the resource lock the screen orientation.
// 'allow-pointer-lock', // Lets the resource use the Pointer Lock API.
- 'allow-popups', // Allows popups (such as window.open(), target="_blank", or showModalDialog()). If this keyword is not used, the popup will silently fail to open.
+ 'allow-popups', // Allows popups (such as window.open(), _blank as target attribute, or showModalDialog()). If this keyword is not used, the popup will silently fail to open.
'allow-popups-to-escape-sandbox', // Lets the sandboxed document open new windows without those windows inheriting the sandboxing. For example, this can safely sandbox an advertisement without forcing the same restrictions upon the page the ad links to.
// 'allow-presentation', // Lets the resource start a presentation session.
'allow-same-origin', // If this token is not used, the resource is treated as being from a special origin that always fails the same-origin policy. | cm fix (#<I>) | kyma-project_luigi | train | js |
e0f03124debecfb7bc68a07f1aa592b788e596a0 | diff --git a/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb b/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb
index <HASH>..<HASH> 100644
--- a/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb
+++ b/lib/logstasher/rails_ext/action_controller/metal/instrumentation.rb
@@ -22,8 +22,6 @@ module ActionController
logstasher_add_custom_fields_to_request_context(LogStasher.request_context)
end
- result = super
-
if self.respond_to?(:logtasher_add_custom_fields_to_payload)
before_keys = raw_payload.keys.clone
logtasher_add_custom_fields_to_payload(raw_payload)
@@ -32,6 +30,8 @@ module ActionController
LogStasher.custom_fields += after_keys - before_keys
end
+ result = super
+
payload[:status] = response.status
append_info_to_payload(payload)
LogStasher.store.each do |key, value| | Call logtasher_add_custom_fields_to_payload in
advance, otherwise custom fields will not be appended when exceptions
are raised in Rails. | shadabahmed_logstasher | train | rb |
30c7fdb4d489c79ae24b613f48df10c174e20842 | diff --git a/dtool_irods/storagebroker.py b/dtool_irods/storagebroker.py
index <HASH>..<HASH> 100644
--- a/dtool_irods/storagebroker.py
+++ b/dtool_irods/storagebroker.py
@@ -255,7 +255,8 @@ class IrodsStorageBroker(object):
def _get_size_and_timestamp(self, irods_path):
if self._use_cache:
- return self._size_and_timestamp_cache[irods_path]
+ if irods_path in self._size_and_timestamp_cache:
+ return self._size_and_timestamp_cache[irods_path]
cmd = CommandWrapper(["ils", "-l", irods_path])
cmd()
@@ -267,6 +268,7 @@ class IrodsStorageBroker(object):
time_str = info[4]
dt = datetime.datetime.strptime(time_str, "%Y-%m-%d.%H:%M")
utc_timestamp = int(time.mktime(dt.timetuple()))
+
return size_in_bytes, utc_timestamp
@classmethod | Improve robustness of _get_size_and_timestamp with cache | jic-dtool_dtool-irods | train | py |
3435c537251596d57cf09264fcde22c819eb908a | diff --git a/lib/devise.rb b/lib/devise.rb
index <HASH>..<HASH> 100644
--- a/lib/devise.rb
+++ b/lib/devise.rb
@@ -13,7 +13,7 @@ module Devise
autoload :Base, 'devise/encryptors/base'
autoload :Bcrypt, 'devise/encryptors/bcrypt'
autoload :AuthlogicSha512, 'devise/encryptors/authlogic_sha512'
- autoload :AuthlogicSha1, 'devise/encryptors/authlogic_sha1'
+ autoload :ClearanceSha1, 'devise/encryptors/clearance_sha1'
autoload :RestfulAuthenticationSha1, 'devise/encryptors/restful_authentication_sha1'
autoload :Sha512, 'devise/encryptors/sha512'
autoload :Sha1, 'devise/encryptors/sha1' | Fix typo: autoload Clearance encryptor and not Authlogic one. | plataformatec_devise | train | rb |
8aa7d4b4633c0b4761b14e9f9d7702bcf90ed9b2 | diff --git a/cmd/syncthing/monitor.go b/cmd/syncthing/monitor.go
index <HASH>..<HASH> 100644
--- a/cmd/syncthing/monitor.go
+++ b/cmd/syncthing/monitor.go
@@ -38,8 +38,8 @@ var (
)
const (
- countRestarts = 5
- loopThreshold = 15 * time.Second
+ countRestarts = 4
+ loopThreshold = 60 * time.Second
)
func monitorMain() { | Lower the bar for when to stop restarting (fixes #<I>) | syncthing_syncthing | train | go |
6b3d93d9172c0c85cb8060afbdb9bff3673f7b4b | diff --git a/tldap/__init__.py b/tldap/__init__.py
index <HASH>..<HASH> 100644
--- a/tldap/__init__.py
+++ b/tldap/__init__.py
@@ -30,7 +30,7 @@ if not hasattr(django.conf.settings, 'LDAP'):
django.conf.settings.LDAP = {}
# ok to use django settings
-if not django.conf.settings.LDAP:
+if not django.conf.settings.LDAP and hasattr(django.conf.settings, 'LDAP_URL'):
django.conf.settings.LDAP[DEFAULT_LDAP_ALIAS] = {
'ENGINE': 'tldap.backend.fake_transactions',
'URI': django.conf.settings.LDAP_URL,
@@ -48,10 +48,6 @@ if not django.conf.settings.LDAP:
django.conf.settings.LDAP[DEFAULT_LDAP_ALIAS]["TLS_CA"] = (
django.conf.settings.LDAP_TLS_CA)
-if DEFAULT_LDAP_ALIAS not in django.conf.settings.LDAP:
- raise RuntimeError(
- "You must define a '%s' ldap database" % DEFAULT_LDAP_ALIAS)
-
connections = tldap.utils.ConnectionHandler(django.conf.settings.LDAP) | Check LDAP_URL is defined before trying to use it.
Make it possible to have 0 LDAP servers defined. | Karaage-Cluster_python-tldap | train | py |
6b3e6042a3fc13cc12c789cc86fe5a76e53517e1 | diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -6,10 +6,6 @@ unless RUBY_PLATFORM.include?("java")
SimpleCov.start
end
-# Pull in all of the gems including those in the `test` group
-require "bundler"
-Bundler.require
-
# Loading support files
Dir.glob(::File.expand_path("support/*.rb", __dir__)).each { |f| require_relative f }
Dir.glob(::File.expand_path("support/**/*.rb", __dir__)).each { |f| require_relative f } | Remove Bundler.require_relative from spec helper
Our code should require necessary dependencies explicitly. This also
avoids circular reference errors while loading specs | cucumber_aruba | train | rb |
681a263fc4a466b2adae98595f2d2c89b0e42d23 | diff --git a/lib/API.php b/lib/API.php
index <HASH>..<HASH> 100644
--- a/lib/API.php
+++ b/lib/API.php
@@ -27,12 +27,12 @@ class API
}
- public function send($email_name, $email_to, $data = array())
+ public function send($email_id, $email_to, $data = array())
{
$endpoint = "send";
$payload = array(
- "email_name" => $email_name,
+ "email_id" => $email_id,
"email_to" => $email_to,
"email_data" => $data
); | updated to use email_id instead of email_name | sendwithus_sendwithus_php | train | php |
b4bb1dbad00df202fe678fad64c60ceb8d318756 | diff --git a/lib/crawler/index.js b/lib/crawler/index.js
index <HASH>..<HASH> 100644
--- a/lib/crawler/index.js
+++ b/lib/crawler/index.js
@@ -68,7 +68,8 @@ function walkDir(dir, options, callback) {
}
var walker = walk.walk(dir, {});
- var pathSepRegex = new RegExp('^'+escapeStringRegexp(path.sep));
+ var startPathSepRegex = new RegExp('^'+escapeStringRegexp(path.sep));
+ var pathRepRegex = new RegExp(escapeStringRegexp(path.sep),'g');
walker.on('file', function (root, fileStats, next) {
var isRegion = false;
@@ -86,12 +87,13 @@ function walkDir(dir, options, callback) {
}
var filepath = path.join(root.replace(dir, ''), fileStats.name);
- if( filepath.match(pathSepRegex) ) {
- filepath = filepath.replace(pathSepRegex, '');
+ if( filepath.match(startPathSepRegex) ) {
+ filepath = filepath.replace(startPathSepRegex, '');
}
var ref = {
- $ref : filepath
+ // always save as unix style path
+ $ref : filepath.replace(pathRepRegex, '/')
};
if( isRegion ) { | always using unix style filepaths for index | ucd-cws_hobbes-network-format | train | js |
e8c6e505460f09a913fcdabbd133bc35b651f569 | diff --git a/upup/pkg/fi/cloudup/apply_cluster.go b/upup/pkg/fi/cloudup/apply_cluster.go
index <HASH>..<HASH> 100644
--- a/upup/pkg/fi/cloudup/apply_cluster.go
+++ b/upup/pkg/fi/cloudup/apply_cluster.go
@@ -302,10 +302,7 @@ func (c *ApplyClusterCmd) Run() error {
var nodeDockerConfig string
{
- secret, err := secretStore.Secret("nodedockercfg")
- if err != nil {
- return fmt.Errorf("error retrieving docker config %q: %v", secret, err)
- }
+ secret, _ := secretStore.Secret("nodedockercfg")
if secret != nil {
nodeDockerConfig, err = secret.AsString()
if err != nil { | Don't error when no docker config is supplied | kubernetes_kops | train | go |
f1f929d8416a3a16c89687ed6ab00f992a1d9aac | diff --git a/src/Galek/Utils/Calendar/Calendar.php b/src/Galek/Utils/Calendar/Calendar.php
index <HASH>..<HASH> 100644
--- a/src/Galek/Utils/Calendar/Calendar.php
+++ b/src/Galek/Utils/Calendar/Calendar.php
@@ -391,7 +391,7 @@ class Calendar extends DateTime{
}
/**
- * Get Easter Monday
+ * Get Easter
* @param type $rok
* @return type
*/ | CALENDAR: Change comment for Easter | JanGalek_netteCalendar | train | php |
3ada8a54e49ce207d897c82bc6e989ca4e2c576c | diff --git a/agent/bill/monitor.go b/agent/bill/monitor.go
index <HASH>..<HASH> 100644
--- a/agent/bill/monitor.go
+++ b/agent/bill/monitor.go
@@ -183,17 +183,16 @@ func (m *Monitor) VerifyChannelsForInactivity() error {
FROM channels
LEFT JOIN sessions ses
ON channels.id = ses.channel
-
LEFT JOIN offerings offer
ON channels.offering = offer.id
-
LEFT JOIN accounts acc
ON channels.agent = acc.eth_addr
WHERE channels.service_status IN ('pending', 'active', 'suspended')
AND channels.channel_status NOT IN ('pending')
AND acc.in_use
GROUP BY channels.id, offer.max_inactive_time_sec
- HAVING MAX(ses.last_usage_time) + (offer.max_inactive_time_sec * INTERVAL '1 second') < now();`
+ HAVING GREATEST(MAX(ses.last_usage_time), channels.service_changed_time) +
+ (offer.max_inactive_time_sec * INTERVAL '1 second') < now();`
return m.processEachChannel(query, m.terminateService)
} | fix VerifyChannelsForInactivity for no sessions | Privatix_dappctrl | train | go |
71dbfdb3a66db428539c821ac5b83bc428b86ee2 | diff --git a/src/connection.js b/src/connection.js
index <HASH>..<HASH> 100644
--- a/src/connection.js
+++ b/src/connection.js
@@ -27,6 +27,7 @@ var states = _.reduce([
'DISCONNECTED',
'CONNECTING',
'CONNECTED',
+ 'INTERRUPTED',
'RECONNECTING',
'DISCONNECTING',
], function(memo, state) {
@@ -193,6 +194,18 @@ var Connection = extend(Duplex, {
// nop
},
+ _setConnectionState: function(newState) {
+ if (this.connectionState !== newState) {
+ this.connectionState = newState;
+
+ this.rxBuffer = null;
+
+ if (EventEmitter.listenerCount(this, 'connectionState') > 0) {
+ this.emit('connectionState', newState);
+ }
+ }
+ },
+
send: function(data) {
if (data instanceof Header) {
data = data.toBuffer(); | - Added a new „interrupted“ connection state | danielwippermann_resol-vbus | train | js |
3421add94305792f3e13d3d6915bc4b257efc262 | diff --git a/test/base.py b/test/base.py
index <HASH>..<HASH> 100644
--- a/test/base.py
+++ b/test/base.py
@@ -33,7 +33,8 @@ class BaseTestCase(unittest.TestCase):
jsonschema.Draft7Validator(self.builder.to_schema()).validate(obj)
def assertResult(self, expected):
- self.assertEqual(self.builder.to_schema(), expected)
+ self.assertEqual(expected, self.builder.to_schema(),
+ 'Generated schema (below) does not match expected (above)')
self.assertUserContract()
def assertUserContract(self): | swap schema assertion order and add message for clearer debugging | wolverdude_GenSON | train | py |
32bea81ee3efb46fc5b6ea8146e108e9c0d270e0 | diff --git a/spec/support/shared_examples/numerical_submatcher.rb b/spec/support/shared_examples/numerical_submatcher.rb
index <HASH>..<HASH> 100644
--- a/spec/support/shared_examples/numerical_submatcher.rb
+++ b/spec/support/shared_examples/numerical_submatcher.rb
@@ -1,5 +1,3 @@
-require 'spec_helper'
-
shared_examples 'a numerical submatcher' do
it 'implements the with_message method' do
expect(subject).to respond_to(:with_message).with(1).arguments | Remove "circular require considered harmful" warning | thoughtbot_shoulda-matchers | train | rb |
7e193cb1f4ed7cd450099c26f75d83c0a6ea12b9 | diff --git a/O365/drive.py b/O365/drive.py
index <HASH>..<HASH> 100644
--- a/O365/drive.py
+++ b/O365/drive.py
@@ -1144,7 +1144,8 @@ class Folder(DriveItem):
else:
return items
- def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE):
+ def upload_file(self, item, item_name=None, chunk_size=DEFAULT_UPLOAD_CHUNK_SIZE,
+ upload_in_chunks=False):
""" Uploads a file
:param item: path to the item you want to upload
@@ -1153,6 +1154,7 @@ class Folder(DriveItem):
:type item: str or Path
:param chunk_size: Only applies if file is bigger than 4MB.
Chunk size for uploads. Must be a multiple of 327.680 bytes
+ :param upload_in_chunks: force the method to upload the file in chunks
:return: uploaded file
:rtype: DriveItem
"""
@@ -1168,7 +1170,7 @@ class Folder(DriveItem):
file_size = item.stat().st_size
- if file_size <= UPLOAD_SIZE_LIMIT_SIMPLE:
+ if not upload_in_chunks and file_size <= UPLOAD_SIZE_LIMIT_SIMPLE:
# Simple Upload
url = self.build_url(
self._endpoints.get('simple_upload').format(id=self.object_id, | Drive: `upload_file` method can now force the upload type to be in chunks | O365_python-o365 | train | py |
9d732abbafdbc9c14840c4e483d3a52a7ed459bb | diff --git a/openstack_dashboard/dashboards/project/volumes/volumes/tables.py b/openstack_dashboard/dashboards/project/volumes/volumes/tables.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/dashboards/project/volumes/volumes/tables.py
+++ b/openstack_dashboard/dashboards/project/volumes/volumes/tables.py
@@ -359,7 +359,7 @@ class AttachmentColumn(tables.Column):
"""
def get_raw_data(self, volume):
request = self.table.request
- link = _('Attached to %(instance)s on %(dev)s')
+ link = _('%(dev)s on %(instance)s')
attachments = []
# Filter out "empty" attachments which the client returns...
for attachment in [att for att in volume.attachments if att]: | Remove repetition of "Attached to" in table
When volume is attached to instances, there is duplicated word "Attached to"
in table header and table cell.
So I fixed words to "devices on instances" in table cell.
Change-Id: I<I>f<I>df<I>dbe<I>ea5eb<I>fc9d<I>a
Closes-Bug: #<I> | openstack_horizon | train | py |
18f16d61ff1ea3bfd42a5761acbcc05131008d25 | diff --git a/lib/moodlelib.php b/lib/moodlelib.php
index <HASH>..<HASH> 100644
--- a/lib/moodlelib.php
+++ b/lib/moodlelib.php
@@ -1740,7 +1740,10 @@ function authenticate_user_login($username, $password) {
// First try to find the user in the database
- $user = get_user_info_from_db('username', $username);
+ if (!$user = get_user_info_from_db('username', $username)) {
+ $user->id = 0; // Not a user
+ $user->auth = $CFG->auth;
+ }
// Sort out the authentication method we are using. | Fixes notices when username doesn't exist | moodle_moodle | train | php |
7fed8992e27ff243a6a726a336768485362ce608 | diff --git a/src/Mutation/DeleteNode.php b/src/Mutation/DeleteNode.php
index <HASH>..<HASH> 100644
--- a/src/Mutation/DeleteNode.php
+++ b/src/Mutation/DeleteNode.php
@@ -57,7 +57,10 @@ class DeleteNode extends AbstractMutationResolver
*/
public function onSubmit(FormEvent $event)
{
- if (!$event->getData() instanceof NodeInterface || !$event->getData()->getId()) {
+ $node = $this->context->getDefinition()->getNode();
+ $class = $this->context->getEndpoint()->getClassForType($node);
+
+ if (!$event->getData() instanceof NodeInterface || !$event->getData()->getId() || !is_a($event->getData(), $class)) {
throw new NodeNotFoundException();
}
} | Verify if given node is instance of expected node before deletion | ynloultratech_graphql-bundle | train | php |
c776152c727531905a5e17eb4f5ec34d185f5ee1 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,14 +10,13 @@ if svem_flag in sys.argv:
sys.argv.remove(svem_flag)
setup(name='calysto',
- version='0.9.3',
+ version='0.9.4',
description='Libraries and Languages for Python and IPython',
long_description="Libraries and Languages for IPython and Python",
author='Douglas Blank',
author_email='[email protected]',
url="https://github.com/Calysto/calysto",
- install_requires=['IPython', 'metakernel', 'svgwrite', 'Pillow',
- 'cairosvg'],
+ install_requires=['IPython', 'metakernel', 'svgwrite', 'cairosvg'],
packages=['calysto',
'calysto.util',
'calysto.widget', | Bump to <I>, release to pypi, removed Pillow from requirements | Calysto_calysto | train | py |
357b60bffa5d5f24f29f2edc4e3dbb517a8bd619 | diff --git a/shared/common-adapters/markdown.js b/shared/common-adapters/markdown.js
index <HASH>..<HASH> 100644
--- a/shared/common-adapters/markdown.js
+++ b/shared/common-adapters/markdown.js
@@ -34,7 +34,7 @@ const codeSnippetBlockStyle = {
paddingBottom: globalMargins.xtiny,
paddingLeft: globalMargins.tiny,
paddingRight: globalMargins.tiny,
- whiteSpace: 'pre',
+ whiteSpace: 'pre-wrap',
}
// Order matters, since we want to match the longer ticks first | we need code to wrap else we get horiz scroll bars (#<I>) | keybase_client | train | js |
f501f630199bc5930c7843296f02bbcd1b0fea6f | diff --git a/public/javascripts/katello.js b/public/javascripts/katello.js
index <HASH>..<HASH> 100644
--- a/public/javascripts/katello.js
+++ b/public/javascripts/katello.js
@@ -288,7 +288,7 @@ KT.common = (function() {
return root_url;
},
getSearchParams : function() {
- var search_string = $.bbq.getState('search')
+ var search_string = $.bbq.getState('search');
if( search_string ){
return { 'search' : search_string }; | fixed javascript errors due to bbq and incorrect includes in redhat_provider.html.haml | Katello_katello | train | js |
bc65f9236d1d6a6a740291f5c376cc6de8820a81 | diff --git a/amqp_connection/connection.py b/amqp_connection/connection.py
index <HASH>..<HASH> 100644
--- a/amqp_connection/connection.py
+++ b/amqp_connection/connection.py
@@ -22,6 +22,7 @@ class Connection:
self.load_configuration(config)
self.connect()
self.open_channel()
+ self.setup_queues()
self.start_consuming(in_queue)
def close(self): | Setup queues before starting to consume | FTV-Subtil_py_amqp_connection | train | py |
d8a447b97297a7d703c7255a4f0798f5d46a7b00 | diff --git a/code/model/ExternalContentSource.php b/code/model/ExternalContentSource.php
index <HASH>..<HASH> 100644
--- a/code/model/ExternalContentSource.php
+++ b/code/model/ExternalContentSource.php
@@ -36,6 +36,12 @@ class ExternalContentSource extends DataObject {
/**
+ * @var ArrayList - children
+ **/
+ private $children;
+
+
+ /**
* Get the object represented by an external ID
*
* All external content sources must override this
@@ -213,20 +219,18 @@ class ExternalContentSource extends DataObject {
* Handle a children call by retrieving from stageChildren
*/
public function Children() {
- static $children;
-
- if (!$children) {
- $children = new ArrayList();
+ if (!$this->children) {
+ $this->children = new ArrayList();
$kids = $this->stageChildren();
if ($kids) {
foreach ($kids as $child) {
if ($child->canView()) {
- $children->push($child);
+ $this->children->push($child);
}
}
}
}
- return $children;
+ return $this->children;
}
/** | bugfix store children of source as private var not static | nyeholt_silverstripe-external-content | train | php |
440b1a029915e64f46c0ca7452bff0f2c8b0527f | diff --git a/src/test/java/hex/DeepLearningProstateTest.java b/src/test/java/hex/DeepLearningProstateTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/hex/DeepLearningProstateTest.java
+++ b/src/test/java/hex/DeepLearningProstateTest.java
@@ -88,7 +88,7 @@ public class DeepLearningProstateTest extends TestUtil {
-2, //auto-tune
-1, //N epochs per iteration
0, //1 epoch per iteration
- rng.nextInt(100), // <1 epoch per iteration
+ rng.nextInt(200), // <1 epoch per iteration
500, //>1 epoch per iteration
}) {
DeepLearningModel model1 = null, model2 = null;
@@ -147,6 +147,8 @@ public class DeepLearningProstateTest extends TestUtil {
}
model1 = UKV.get(dest_tmp);
+ assert( ((p.train_samples_per_iteration <= 0 || p.train_samples_per_iteration >= frame.numRows()) && model1.epoch_counter > epochs)
+ || Math.abs(model1.epoch_counter - epochs)/epochs < 0.20 );
if (n_folds != 0)
// test HTML of cv models | Improve assertion for number of training rows.
Conflicts:
src/test/java/hex/DeepLearningProstateTest.java | h2oai_h2o-2 | train | java |
c34a4e7fa7764f81080cdcef332855ca9b7c128d | diff --git a/pkg/models/position/position_test.go b/pkg/models/position/position_test.go
index <HASH>..<HASH> 100644
--- a/pkg/models/position/position_test.go
+++ b/pkg/models/position/position_test.go
@@ -381,3 +381,26 @@ func TestUpdateFromRaw(t *testing.T) {
assert.Equal(t, expected, got)
assert.Equal(t, "pu", p.Type)
}
+
+func TestCancelFromRaw(t *testing.T) {
+ pld := []interface{}{
+ "tETHUST", "ACTIVE", 0.2, 153.71, 0, 0, -0.07944800000000068, -0.05855181835925015,
+ 67.52755254906451, 1.409288545397275, nil, 142420429, nil, nil, nil, 0, nil, 0, 0,
+ map[string]interface{}{
+ "reason": "TRADE",
+ "order_id": 34934099168,
+ "order_id_oppo": 34934090814,
+ "liq_stage": nil,
+ "trade_price": "153.71",
+ "trade_amount": "0.2",
+ },
+ }
+
+ expected := "position.Cancel"
+ p, err := position.CancelFromRaw(pld)
+ assert.Nil(t, err)
+
+ got := reflect.TypeOf(p).String()
+ assert.Equal(t, expected, got)
+ assert.Equal(t, "pc", p.Type)
+} | position raw data mapping to Cancel instance test coverage | bitfinexcom_bitfinex-api-go | train | go |
4fd08c8bfc6b1945d58f9f06c0a26130ae03a2a3 | diff --git a/moneyed/localization.py b/moneyed/localization.py
index <HASH>..<HASH> 100644
--- a/moneyed/localization.py
+++ b/moneyed/localization.py
@@ -46,7 +46,8 @@ class CurrencyFormatter(object):
if currency_code in local_set:
return local_set.get(currency_code)
else:
- return ('', " %s" % currency_code)
+ ret = self.sign_definitions.get(DEFAULT).get(currency_code)
+ return ret if ret is not None else ('', " %s" % currency_code)
def get_formatting_definition(self, locale):
if locale.upper() not in self.formatting_definitions: | Fall back to default currency sign
If current locale has no sign definition for a currency, fall back to
using the DEFAULT locale to try to get the sign definition. | limist_py-moneyed | train | py |
7c89b95c4123b225b281d0b4a8d5bf732bd1d523 | diff --git a/pyOCD/gdbserver/gdbserver.py b/pyOCD/gdbserver/gdbserver.py
index <HASH>..<HASH> 100644
--- a/pyOCD/gdbserver/gdbserver.py
+++ b/pyOCD/gdbserver/gdbserver.py
@@ -274,11 +274,11 @@ class GDBServer(threading.Thread):
# Use internal IO handler.
semihost_io_handler = semihost.InternalSemihostIOHandler()
- self.log.error("semihosting console = %s" % self.semihost_console_type)
if self.semihost_console_type == 'telnet':
self.telnet_console = semihost.TelnetSemihostIOHandler(self.telnet_port, self.serve_local_only)
semihost_console = self.telnet_console
else:
+ self.log.info("Semihosting will be output to console")
self.telnet_console = None
semihost_console = semihost_io_handler
self.semihost = semihost.SemihostAgent(self.target_context, io_handler=semihost_io_handler, console=semihost_console) | Change inappropriate error log to info log in gdb server.
- Log message reporting the semihosting console type was incorrectly
using an error log level. Changed only log when semihosting will be
output to the stdio console, and fixed log level. | mbedmicro_pyOCD | train | py |
73ff76b1c704da9ba55b9c3ae6280ff2b31de8fb | diff --git a/galpy/df_src/streamgapdf.py b/galpy/df_src/streamgapdf.py
index <HASH>..<HASH> 100644
--- a/galpy/df_src/streamgapdf.py
+++ b/galpy/df_src/streamgapdf.py
@@ -62,7 +62,7 @@ class streamgapdf(galpy.df_src.streamdf.streamdf):
nTrackChunksImpact= (floor(deltaAngleTrack/0.15)+1) number of chunks to divide the progenitor track in near the impact [similar to nTrackChunks]
- nKickPoints= (10xnTrackChunksImpact) number of points along the stream to compute the kicks at (kicks are then interpolated)
+ nKickPoints= (30xnTrackChunksImpact) number of points along the stream to compute the kicks at (kicks are then interpolated); '30' chosen such that higherorderTrack can be set to False and get calculations accurate to > 99%
nokicksetup= (False) if True, only run as far as setting up the coordinate transformation at the time of impact (useful when using this in streampepperdf)
@@ -449,7 +449,7 @@ class streamgapdf(galpy.df_src.streamdf.streamdf):
self._subhalovel= subhalovel
# First set nKickPoints
if nKickPoints is None:
- self._nKickPoints= 10*self._nTrackChunksImpact
+ self._nKickPoints= 30*self._nTrackChunksImpact
else:
self._nKickPoints= nKickPoints
# Sign of delta angle tells us whether the impact happens to the | Change number of points at which the kicks are computed | jobovy_galpy | train | py |
be4cd6ac3eba1e934705e4a3f04a494715928c35 | diff --git a/lib/yinx.rb b/lib/yinx.rb
index <HASH>..<HASH> 100644
--- a/lib/yinx.rb
+++ b/lib/yinx.rb
@@ -17,7 +17,7 @@ module Yinx
def download
config.note_filters.map do |filter|
- note_store.findNotes(filter).notes
+ note_store.findNotes(filter)
end.flatten
end
diff --git a/lib/yinx/note_store.rb b/lib/yinx/note_store.rb
index <HASH>..<HASH> 100644
--- a/lib/yinx/note_store.rb
+++ b/lib/yinx/note_store.rb
@@ -38,7 +38,15 @@ module Yinx
end
def findNotes opt = {}
- note_store.findNotesMetadata auth_token, filter(opt), 0, 100, spec(opt)
+ fl, start, ending, sp = filter(opt), 0, 250, spec(opt)
+ md_list = note_store.findNotesMetadata auth_token, fl, start, ending, sp
+ result = md_list.notes
+ while md_list.totalNotes > start + ending
+ start += ending
+ md_list = note_store.findNotesMetadata auth_token, fl, start, ending, sp
+ result.concat md_list.notes
+ end
+ result
end
private | load more notes if reach maxNotes | turnon_yinx | train | rb,rb |
5d6873bb84151f1aa217ba9e8028a16067adbed9 | diff --git a/salt/beacons/memusage.py b/salt/beacons/memusage.py
index <HASH>..<HASH> 100644
--- a/salt/beacons/memusage.py
+++ b/salt/beacons/memusage.py
@@ -11,6 +11,7 @@ Beacon to monitor memory usage.
from __future__ import absolute_import
import logging
import re
+from salt.ext.six.moves import map
# Import Third Party Libs
try: | One more python3 related fix | saltstack_salt | train | py |
af5e8078c152f38497d392f70fb174eaf87af104 | diff --git a/SteamCommunity.php b/SteamCommunity.php
index <HASH>..<HASH> 100644
--- a/SteamCommunity.php
+++ b/SteamCommunity.php
@@ -55,7 +55,8 @@ class SteamCommunity
/**
* Login with the set username and password.
* @return LoginResult
- * @throws \Exception
+ * @throws SteamException Thrown when Steam gives an unexpected response (e.g. Steam is down/having issues)
+ * @throws \Exception Thrown when cookiefile is unable to be created.
*/
public function doLogin()
{
@@ -132,8 +133,8 @@ class SteamCommunity
* Create a new Steam account.
* @param $email
* @return CreateAccountResult
- * @throws SteamException
- * @throws \Exception
+ * @throws SteamException Thrown when Steam gives an unexpected response (e.g. Steam is down/having issues)
+ * @throws \Exception Thrown when cookiefile is unable to be created.
*/
public function createAccount($email)
{
@@ -282,7 +283,7 @@ class SteamCommunity
* In most cases, you don't need to call this since an API key is registered automatically upon logging in as long as you have set the domain first.
* @param string $domain
* @return string
- * @throws SteamException
+ * @throws SteamException Thrown when Steam gives an unexpected response (e.g. Steam is down/having issues)
*/
public function registerApiKey($domain = '')
{ | Added missing thrown exception to PHPDoc for doLogin and updated throw descriptions | waylaidwanderer_PHP-SteamCommunity | train | php |
a0b0853a8ceee1f95f13de8f6a4aae5af18c00bb | diff --git a/src/lory.js b/src/lory.js
index <HASH>..<HASH> 100644
--- a/src/lory.js
+++ b/src/lory.js
@@ -178,7 +178,7 @@ export function lory (slider, opts) {
index = nextIndex;
}
- if (infinite && (Math.abs(nextOffset) === maxOffset || Math.abs(nextOffset) === 0)) {
+ if (infinite && (nextIndex === slides.length - infinite || nextIndex === 0)) {
if (direction) {
index = infinite;
} | fix: detect last slide by index instead of by offset | loryjs_lory | train | js |
3dcf298fed2d5fd65918dc560b3942b2aa0629e8 | diff --git a/btcec/signature.go b/btcec/signature.go
index <HASH>..<HASH> 100644
--- a/btcec/signature.go
+++ b/btcec/signature.go
@@ -427,9 +427,7 @@ func signRFC6979(privateKey *PrivateKey, hash []byte) (*Signature, error) {
k := nonceRFC6979(privkey.D, hash)
inv := new(big.Int).ModInverse(k, N)
r, _ := privkey.Curve.ScalarBaseMult(k.Bytes())
- if r.Cmp(N) == 1 {
- r.Sub(r, N)
- }
+ r.Mod(r, N)
if r.Sign() == 0 {
return nil, errors.New("calculated R is zero") | Fix one-off bug in signRFC<I> | btcsuite_btcd | train | go |
5eae3f27d5ee7a7ce76f7dae049dfab647d9788c | diff --git a/ksi-api/src/main/java/com/guardtime/ksi/unisignature/verifier/rules/ExtendedSignatureCalendarHashChainRightLinksMatchesRule.java b/ksi-api/src/main/java/com/guardtime/ksi/unisignature/verifier/rules/ExtendedSignatureCalendarHashChainRightLinksMatchesRule.java
index <HASH>..<HASH> 100644
--- a/ksi-api/src/main/java/com/guardtime/ksi/unisignature/verifier/rules/ExtendedSignatureCalendarHashChainRightLinksMatchesRule.java
+++ b/ksi-api/src/main/java/com/guardtime/ksi/unisignature/verifier/rules/ExtendedSignatureCalendarHashChainRightLinksMatchesRule.java
@@ -58,7 +58,7 @@ public class ExtendedSignatureCalendarHashChainRightLinksMatchesRule extends Bas
for (int i = 0; i < extendedSignatureRightLinks.size(); i++) {
CalendarHashChainLink link = extendedSignatureRightLinks.get(i);
CalendarHashChainLink initialLink = signatureRightLinks.get(i);
- if (!link.equals(initialLink)) {
+ if (!link.getDataHash().equals(initialLink.getDataHash())) {
LOGGER.info("Extended signature contains different calendar hash chain right link");
return VerificationResultCode.FAIL;
} | Compare right calendar hash chain links by their data hashes | guardtime_ksi-java-sdk | train | java |
433bd4c2130b2b42eb27b8d21fc096baea6d5195 | diff --git a/src/Runner/Runner.php b/src/Runner/Runner.php
index <HASH>..<HASH> 100644
--- a/src/Runner/Runner.php
+++ b/src/Runner/Runner.php
@@ -170,7 +170,6 @@ final class Runner
$old = file_get_contents($file->getRealPath());
$new = $old;
- $name = $this->getFileRelativePathname($file);
$appliedFixers = array(); | Fix: No need to determine relative file name twice | FriendsOfPHP_PHP-CS-Fixer | train | php |
67e86ed1b052dcc11e53b29de13347d41d17a4f8 | diff --git a/core/src/main/java/jenkins/model/Jenkins.java b/core/src/main/java/jenkins/model/Jenkins.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/jenkins/model/Jenkins.java
+++ b/core/src/main/java/jenkins/model/Jenkins.java
@@ -1818,10 +1818,6 @@ public class Jenkins extends AbstractCIBase implements DirectlyModifiableTopLeve
return viewGroupMixIn.getViews();
}
- public Collection<View> getAllViews() {
- return super.getAllViews();
- }
-
@Override
public void addView(View v) throws IOException {
viewGroupMixIn.addView(v); | :facepalm: now it's just noop code that should be removed | jenkinsci_jenkins | train | java |
52e4be72b1529816ed9375a9e5e03cad7592e4fe | diff --git a/fin.js b/fin.js
index <HASH>..<HASH> 100644
--- a/fin.js
+++ b/fin.js
@@ -267,7 +267,7 @@ var fin = module.exports = new (function(){
this._handleMessage = function(message) {
if (message.response) {
log('handle resonse', message.response)
- this._executeCallback(message.response, message)
+ this._executeCallback(message.response, message.data)
} else if (message.event == 'mutation') {
var mutation = JSON.parse(message.data)
log('handle mutation', mutation) | Responses carry the payload in the data property now | marcuswestin_fin | train | js |
eff85aa9f6a5df44b981c3e4dc9fec107ad3a0f4 | diff --git a/closure/goog/editor/plugins/firststrong.js b/closure/goog/editor/plugins/firststrong.js
index <HASH>..<HASH> 100644
--- a/closure/goog/editor/plugins/firststrong.js
+++ b/closure/goog/editor/plugins/firststrong.js
@@ -177,7 +177,7 @@ goog.editor.plugins.FirstStrong.prototype.getBlockAncestor_ = function() {
while (!goog.editor.plugins.FirstStrong.isBlock_(start)) {
start = start.parentNode;
}
- return /** @type {Element} */ start;
+ return /** @type {Element} */ (start);
}; | Fix invalid cast
R=nicksantos
DELTA=1 (0 added, 0 deleted, 1 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL> | google_closure-library | train | js |
7f1d6fd071aa530bd16d06c189030a4dd58b13ea | diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -1,7 +1,8 @@
+'use strict'
+
var gulp = require('gulp'),
rimraf = require('rimraf'),
runSequence = require('run-sequence'),
- frontMatter = require('gulp-front-matter'),
markdown = require('gulp-markdown'),
highlight = require('gulp-highlight'),
autoprefixer = require('gulp-autoprefixer'),
@@ -10,7 +11,6 @@ var gulp = require('gulp'),
uglify = require('gulp-uglify'),
concat = require('gulp-concat'),
connect = require('gulp-connect'),
- path = require('path'),
modRewrite = require('connect-modrewrite'),
dynamicRouting = require('./bin/gulp-dynamic-routing'),
karma = require('gulp-karma'),
@@ -199,7 +199,7 @@ gulp.task('test', ['karma-test'], function() {
gulp.task('build', function(cb) {
runSequence('clean', ['copy', 'copy-partials', 'css', 'uglify'], 'copy-templates', function() {
- console.log("Successfully built.");
+ console.log('Successfully built.');
cb();
});
}); | a couple fixes for linter and unused vars
Looking forward to using this guys! Congrats on the release. | zurb_foundation-apps | train | js |
fb45a761963eb4aca3f93b0b1c28994d12b2ebd1 | diff --git a/geoviews/operation/projection.py b/geoviews/operation/projection.py
index <HASH>..<HASH> 100644
--- a/geoviews/operation/projection.py
+++ b/geoviews/operation/projection.py
@@ -161,7 +161,9 @@ class project_points(_project_operation):
return element.clone(crs=self.p.projection)
xdim, ydim = element.dimensions()[:2]
xs, ys = (element.dimension_values(i) for i in range(2))
- coordinates = self.p.projection.transform_points(element.crs, xs, ys)
+ coordinates = self.p.projection.transform_points(
+ element.crs, np.asarray(xs), np.asarray(ys)
+ )
mask = np.isfinite(coordinates[:, 0])
dims = [d for d in element.dimensions() if d not in (xdim, ydim)]
new_data = {k: v[mask] for k, v in element.columns(dims).items()} | Add support for pandas Float<I>Array (#<I>) | pyviz_geoviews | train | py |
f77a2e947cfe73eac04951decc72979c3040aab6 | diff --git a/server/workers/bro/bro_log_reader.py b/server/workers/bro/bro_log_reader.py
index <HASH>..<HASH> 100644
--- a/server/workers/bro/bro_log_reader.py
+++ b/server/workers/bro/bro_log_reader.py
@@ -33,16 +33,17 @@ class BroLogReader():
# First parse the header of the bro log
field_names, field_types = self._parse_bro_header(logfile)
- # Fixme: SO stupid to write a csv reader, but csv.DictReader on Bro
- # files was doing something weird with generator output that
- # affected zeroRPC and gave could not route _zpc_more error.
+ # Note: SO stupid to write a csv reader, but csv.DictReader on Bro
+ # files was doing something weird with generator output that
+ # affected zeroRPC and gave 'could not route _zpc_more' error.
+ # So wrote my own, put a sleep at the end, seems to fix it.
while 1:
_line = next(logfile)
- if _line.startswith('#close'):
+ if not _line.startswith('#close'):
+ yield self._cast_dict(dict(zip(field_names, _line.split(self.delimiter))))
+ else:
time.sleep(.1) # Give time for zeroRPC to finish messages
break
- else:
- yield self._cast_dict(dict(zip(field_names, _line.split(self.delimiter))))
def _parse_bro_header(self, logfile): | small change to if/else in bro_log_reader
Former-commit-id: <I>f<I>e6a<I>f<I>c<I>c4a1d<I>ed4c4 | SuperCowPowers_workbench | train | py |
44ef5f43a8ab47a633427cfc3832b178e7c045f5 | diff --git a/provider/lxd/lxdclient/config.go b/provider/lxd/lxdclient/config.go
index <HASH>..<HASH> 100644
--- a/provider/lxd/lxdclient/config.go
+++ b/provider/lxd/lxdclient/config.go
@@ -28,7 +28,7 @@ const (
// Config contains the config values used for a connection to the LXD API.
type Config struct {
// Namespace identifies the namespace to associate with containers
- // and other resources with which the client interacts. If may be
+ // and other resources with which the client interacts. It may be
// blank.
Namespace string | lxdclient: fix typo | juju_juju | train | go |
97466d3d540243702496b24426765f0022f4bc07 | diff --git a/runtime.js b/runtime.js
index <HASH>..<HASH> 100644
--- a/runtime.js
+++ b/runtime.js
@@ -145,14 +145,14 @@
var delegate = context.delegate;
if (delegate) {
if (method === "return" ||
- (method === "throw" && delegate.iterator.throw === undefined)) {
+ (method === "throw" && delegate.iterator[method] === undefined)) {
// A return or throw (when the delegate iterator has no throw
// method) always terminates the yield* loop.
context.delegate = null;
// If the delegate iterator has a return method, give it a
// chance to clean up.
- var returnMethod = delegate.iterator.return;
+ var returnMethod = delegate.iterator["return"];
if (returnMethod) {
var record = tryCatch(returnMethod, delegate.iterator, arg);
if (record.type === "throw") { | Use computed properties for reserved generator method names.
Fixes #<I>. | facebook_regenerator | train | js |
868e774268a185f51a6e448794958389f3fa3661 | diff --git a/src/Credentials/Credentials.php b/src/Credentials/Credentials.php
index <HASH>..<HASH> 100644
--- a/src/Credentials/Credentials.php
+++ b/src/Credentials/Credentials.php
@@ -130,6 +130,10 @@ class Credentials implements CredentialsInterface
: 'default';
}
+ if (!file_exists($filename)) {
+ throw new \RuntimeException("Credentials file not found: $filename");
+ }
+
if (!($data = parse_ini_file($filename, true))) {
throw new \RuntimeException('Invalid AWS credentials file: '
. $filename);
diff --git a/tests/Credentials/CredentialsTest.php b/tests/Credentials/CredentialsTest.php
index <HASH>..<HASH> 100644
--- a/tests/Credentials/CredentialsTest.php
+++ b/tests/Credentials/CredentialsTest.php
@@ -106,6 +106,18 @@ EOT;
/**
* @expectedException \RuntimeException
+ * @expectedExceptionMessage Credentials file not found:
+ */
+ public function testEnsuresIniFileExists()
+ {
+ unset($_SERVER[Credentials::ENV_KEY],
+ $_SERVER[Credentials::ENV_SECRET]);
+ $_SERVER['HOME'] = '/does/not/exist';
+ Credentials::fromIni();
+ }
+
+ /**
+ * @expectedException \RuntimeException
* @expectedExceptionMessage Invalid AWS credentials profile foo in
*/
public function testEnsuresProfileIsNotEmpty() | Adding check to ensure credentials file exists | aws_aws-sdk-php | train | php,php |
a4ac583671d4dfea3c7753aeea93c6a237449c13 | diff --git a/keepkeylib/transport_hid.py b/keepkeylib/transport_hid.py
index <HASH>..<HASH> 100644
--- a/keepkeylib/transport_hid.py
+++ b/keepkeylib/transport_hid.py
@@ -44,9 +44,11 @@ class HidTransport(Transport):
if (vendor_id, product_id) in DEVICE_IDS:
devices.setdefault(serial_number, [None, None])
- if interface_number == 0 or interface_number == -1: # normal link
+ print(path, interface_number)
+ if interface_number == 0 or (interface_number == -1 and path.endswith('0')): # normal link
devices[serial_number][0] = path
- elif interface_number == 1: # debug link
+ elif interface_number == 1 or (interface_number == -1 and path.endswith('1')): # debug link
+ print("we made it")
devices[serial_number][1] = path
else:
raise Exception("Unknown USB interface number: %d" % interface_number) | fix python hid for mac testing | keepkey_python-keepkey | train | py |
37f4ea18b4cf5ade1aaf5705755449d0322c327a | diff --git a/tests/browser/spec.js b/tests/browser/spec.js
index <HASH>..<HASH> 100644
--- a/tests/browser/spec.js
+++ b/tests/browser/spec.js
@@ -12,16 +12,6 @@ test('Spec handlebars.js', async ({ page, baseURL }) => {
await waitForMochaAndAssertResult(page);
});
-test('Spec handlebars.amd.js (AMD)', async ({ page, baseURL }) => {
- await page.goto(`${baseURL}/spec/amd.html?headless=true`);
- await waitForMochaAndAssertResult(page);
-});
-
-test('Spec handlebars.runtime.amd.js (AMD)', async ({ page, baseURL }) => {
- await page.goto(`${baseURL}/spec/amd-runtime.html?headless=true`);
- await waitForMochaAndAssertResult(page);
-});
-
test('Spec handlebars.js (UMD)', async ({ page, baseURL }) => {
await page.goto(`${baseURL}/spec/umd.html?headless=true`);
await waitForMochaAndAssertResult(page); | Remove AMD tests
AMD modules were removed in v5. | wycats_handlebars.js | train | js |
244174d2c0eaf3858903c41cbef8db9d6b7f6e9b | diff --git a/command/agent/local_test.go b/command/agent/local_test.go
index <HASH>..<HASH> 100644
--- a/command/agent/local_test.go
+++ b/command/agent/local_test.go
@@ -731,8 +731,7 @@ func TestAgentAntiEntropy_Check_DeferSync(t *testing.T) {
})
// Change the output in the catalog to force it out of sync.
- eCopy := new(structs.HealthCheck)
- *eCopy = *check
+ eCopy := check.Clone()
eCopy.Output = "changed"
reg := structs.RegisterRequest{
Datacenter: agent.config.Datacenter, | Uses the HealthCheck Clone() method in local_test.go. | hashicorp_consul | train | go |
cece9ca2a46a8d14cf85483ffe98ba5e2177ae06 | diff --git a/db/migrate/20131111233053_create_tenants.rb b/db/migrate/20131111233053_create_tenants.rb
index <HASH>..<HASH> 100644
--- a/db/migrate/20131111233053_create_tenants.rb
+++ b/db/migrate/20131111233053_create_tenants.rb
@@ -2,7 +2,7 @@ class CreateTenants < ActiveRecord::Migration
def change
create_table :tenants do |t|
t.string :name, limit: 50, null: false
- t.string :subdomain, limit: 20
+ t.string :subdomain, limit: 50
t.integer :parent_id
t.integer :lft
t.integer :rgt
diff --git a/db/schema.rb b/db/schema.rb
index <HASH>..<HASH> 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -48,7 +48,7 @@ ActiveRecord::Schema.define(version: 20131209200118) do
create_table "tenants", force: true do |t|
t.string "name", limit: 50, null: false
- t.string "subdomain", limit: 20
+ t.string "subdomain", limit: 50
t.integer "parent_id"
t.integer "lft"
t.integer "rgt" | Increase character limit for :subdomain in Tenant | cortex-cms_cortex | train | rb,rb |
607bf530921a1dcefee0a9be06624e56840239d9 | diff --git a/lib/assets/javascripts/jax/webgl/camera.js b/lib/assets/javascripts/jax/webgl/camera.js
index <HASH>..<HASH> 100644
--- a/lib/assets/javascripts/jax/webgl/camera.js
+++ b/lib/assets/javascripts/jax/webgl/camera.js
@@ -94,10 +94,7 @@ Jax.Camera = (function() {
self.stale = false;
var pos = storeVecBuf(self, POSITION);
- quat4.toMat4(self.rotation, self.matrices.mv);
- mat4.translate(self.matrices.mv, vec3.negate(pos), self.matrices.mv);
- mat4.inverse(self.matrices.mv);
-
+ mat4.fromRotationTranslation(self.rotation, pos, self.matrices.mv);
mat4.toInverseMat3(self.matrices.mv, self.matrices.n);
mat3.transpose(self.matrices.n); | Fix breakages introduced by updating gl-matrix | sinisterchipmunk_jax | train | js |
205c521092d5ad7b4f6ffc884966fb853445dbc9 | diff --git a/src/views/boom/editor/page/version/status.php b/src/views/boom/editor/page/version/status.php
index <HASH>..<HASH> 100644
--- a/src/views/boom/editor/page/version/status.php
+++ b/src/views/boom/editor/page/version/status.php
@@ -2,7 +2,7 @@
<h1><?= Lang::get('boom::settings.draft-status.heading') ?></h1>
<p><?= Lang::get('boom::settings.draft-status.intro') ?></p>
- <?php if ($version->isDraft()): ?>
+ <?php if ($version->isDraft() && !$version->isPendingApproval()): ?>
<p><?= Lang::get('boom::settings.draft-status.draft') ?></p>
<?php if (!$auth->loggedIn('publish_page', $page)): ?> | Fixed page draft status for version pending approval | boomcms_boom-core | train | php |
f321b7605b20abdca012d3da5f65b1bdc042a366 | diff --git a/threadedcomments/models.py b/threadedcomments/models.py
index <HASH>..<HASH> 100644
--- a/threadedcomments/models.py
+++ b/threadedcomments/models.py
@@ -8,16 +8,16 @@ PATH_DIGITS = getattr(settings, 'COMMENT_PATH_DIGITS', 10)
class ThreadedComment(Comment):
parent = models.ForeignKey('self', null=True, blank=True, default=None,
related_name='children')
- path = models.TextField(null=True, blank=True, db_index=True)
+ tree_path = models.TextField(null=True, blank=True, db_index=True)
def _get_depth(self):
- return len(self.path.split(PATH_SEPARATOR))
+ return len(self.tree_path.split(PATH_SEPARATOR))
depth = property(_get_depth)
def save(self, *args, **kwargs):
super(ThreadedComment, self).save(*args, **kwargs)
path_list = [unicode(self.pk).zfill(PATH_DIGITS)]
if self.parent:
- path_list.append(self.parent.path)
- self.path = PATH_SEPARATOR.join(path_list)
+ path_list.append(self.parent.tree_path)
+ self.tree_path = PATH_SEPARATOR.join(path_list)
super(ThreadedComment, self).save(*args, **kwargs) | Renamed path to tree_path | HonzaKral_django-threadedcomments | train | py |
2204b0637af79fb63f7cfa69a2e6733f4c506428 | diff --git a/combine/checks/links.py b/combine/checks/links.py
index <HASH>..<HASH> 100644
--- a/combine/checks/links.py
+++ b/combine/checks/links.py
@@ -26,6 +26,7 @@ class InternalLinkBrokenCheck(Check):
"http:",
"https:",
"tel:",
+ "sms:",
"mailto:",
"ftp:",
"file:", | Allow "sms://" links in the link checker | dropseed_combine | train | py |
4bfabc0323c137a04ec875c75ba40d68b2474c73 | diff --git a/src/Policies/MenuItemPolicy.php b/src/Policies/MenuItemPolicy.php
index <HASH>..<HASH> 100644
--- a/src/Policies/MenuItemPolicy.php
+++ b/src/Policies/MenuItemPolicy.php
@@ -37,7 +37,7 @@ class MenuItemPolicy extends BasePolicy
}
// If permission doesn't exist, we can't check it!
- if (!Voyager::model('Permission')->whereName('browse_'.$slug)->exists()) {
+ if (!Voyager::model('Permission')->whereKey('browse_'.$slug)->exists()) {
return true;
} | MenuItemPolicy accesses key instead of name (#<I>)
* MenuItemPolicy access now the key column instead of the name column from table permissions
* adjusted code to style guidelines
* made command more succinct | the-control-group_voyager | train | php |
a2aa7a31a6da16d6b7046bfbe920b0ee2658eab4 | diff --git a/tests/webdriver/unit/insert.rb b/tests/webdriver/unit/insert.rb
index <HASH>..<HASH> 100644
--- a/tests/webdriver/unit/insert.rb
+++ b/tests/webdriver/unit/insert.rb
@@ -3,9 +3,16 @@ require "minitest/pride"
require "minitest/reporters"
MiniTest::Reporters.use!
-describe "Insert" do
- it "should prepend to document" do
- true
+
+describe "Test Insert" do
+ before do
+ editor_url = "file://#{File.join(File.expand_path(__FILE__),
+ '../../../..', 'build/tests/webdriver.html')}"
+ @driver = ScribeDriver.create_scribe_driver(:chrome, editor_url)
+ @editor = @driver.find_element(:class, "editor")
+ @adapter = WebdriverAdapter.new @driver, @editor
+ @adapter.focus()
+ ScribeDriver.js_set_doc_delta(@driver)
end
it "should append to document" do | Added setup to run before each test. | quilljs_quill | train | rb |
67d0de48d9a76e082c0efab2a7fad981bc5f72a3 | diff --git a/tests/test_presenter.py b/tests/test_presenter.py
index <HASH>..<HASH> 100644
--- a/tests/test_presenter.py
+++ b/tests/test_presenter.py
@@ -1,5 +1,4 @@
import pytest
-from textwrap import dedent
from auto_changelog.domain_model import Changelog, default_issue_pattern
from auto_changelog.presenter import MarkdownPresenter | refactor: Remove unused import from test #<I> | Michael-F-Bryan_auto-changelog | train | py |
57435c04fde7d82202aaf47b641137cf37b302fd | diff --git a/alburnum/maas/skin.py b/alburnum/maas/skin.py
index <HASH>..<HASH> 100644
--- a/alburnum/maas/skin.py
+++ b/alburnum/maas/skin.py
@@ -3,15 +3,6 @@
"""Shell for interacting with a remote MAAS (https://maas.ubuntu.com/)."""
-from __future__ import (
- absolute_import,
- print_function,
- unicode_literals,
-)
-
-str = None
-
-__metaclass__ = type
__all__ = [
"Shell",
] | Remove vestigial __future__ imports and suchlike. | maas_python-libmaas | train | py |
9cccb80915525871e9982e14503a39246ff40f51 | diff --git a/lib/racecar.rb b/lib/racecar.rb
index <HASH>..<HASH> 100644
--- a/lib/racecar.rb
+++ b/lib/racecar.rb
@@ -7,6 +7,7 @@ require "racecar/null_instrumenter"
require "racecar/consumer"
require "racecar/consumer_set"
require "racecar/runner"
+require "racecar/parallel_runner"
require "racecar/config"
require "racecar/version"
require "ensure_hash_compact"
@@ -51,6 +52,12 @@ module Racecar
end
def self.run(processor)
- Runner.new(processor, config: config, logger: logger, instrumenter: instrumenter).run
+ runner = Runner.new(processor, config: config, logger: logger, instrumenter: instrumenter)
+
+ if config.parallel_workers && config.parallel_workers > 1
+ ParallelRunner.new(runner: runner, config: config, logger: logger).run
+ else
+ runner.run
+ end
end
end | Use parallel runner when workers exceed 1 | zendesk_racecar | train | rb |
1e352658598dec76dc6014a9970953372041e35b | diff --git a/src/utils/mergeTree.js b/src/utils/mergeTree.js
index <HASH>..<HASH> 100644
--- a/src/utils/mergeTree.js
+++ b/src/utils/mergeTree.js
@@ -118,7 +118,7 @@ export async function mergeTree({
theirName,
mergeDriver,
}).then(r => {
- cleanMerge = r.cleanMerge
+ cleanMerge = cleanMerge && r.cleanMerge
unmergedFiles.push(filepath)
return r.mergeResult
}) | fix(merge): ensure correct value of cleanMerge (#<I>) | isomorphic-git_isomorphic-git | train | js |
19d491c0076d31ea11c40cb947c1eb3c615cd8c7 | diff --git a/src/is-link-broken.js b/src/is-link-broken.js
index <HASH>..<HASH> 100644
--- a/src/is-link-broken.js
+++ b/src/is-link-broken.js
@@ -54,7 +54,7 @@ module.exports = ({
// Allow links to elements on the same page
if (fragment && !uri.path()) {
- return options.checkAnchors && !fileHasTarget(filesToTargets, linkPath.filename, fragment)
+ return options.checkAnchors && !fileHasTarget(filesToTargets, filename, fragment)
}
// Add baseURL in here so that the linkPath resolves to it in the case of | Fixed wrong variable name when accessing current link filename | davidxmoody_metalsmith-broken-link-checker | train | js |
fda76cca253f4db974a71ec194d715872e3f0bce | diff --git a/ks-client/src/main/java/eu/fbk/knowledgestore/client/Client.java b/ks-client/src/main/java/eu/fbk/knowledgestore/client/Client.java
index <HASH>..<HASH> 100644
--- a/ks-client/src/main/java/eu/fbk/knowledgestore/client/Client.java
+++ b/ks-client/src/main/java/eu/fbk/knowledgestore/client/Client.java
@@ -542,8 +542,9 @@ public final class Client extends AbstractKnowledgeStore {
// Encode timeout
if (timeout != null) {
- actualQuery = Strings.isNullOrEmpty(actualQuery) ? "?timeout=" + timeout
- : actualQuery + "&timeout=" + timeout;
+ final long timeoutInSeconds = Math.max(1, timeout / 1000);
+ actualQuery = Strings.isNullOrEmpty(actualQuery) ? "?timeout=" + timeoutInSeconds
+ : actualQuery + "&timeout=" + timeoutInSeconds;
}
// Determine Accept MIME type based on expected (Java) response type | HTTP calls by KS client not supply the timeout in seconds, consistently with unit accepted by server | dkmfbk_knowledgestore | train | java |
5e92e0fb4a4e4bac8a7dcb49922ba635f6070178 | diff --git a/aws/resource_aws_pinpoint_email_channel_test.go b/aws/resource_aws_pinpoint_email_channel_test.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_pinpoint_email_channel_test.go
+++ b/aws/resource_aws_pinpoint_email_channel_test.go
@@ -16,6 +16,7 @@ func TestAccAWSPinpointEmailChannel_basic(t *testing.T) {
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSPinpointApp(t) },
+ErrorCheck: testAccErrorCheck(t, pinpoint.EndpointsID),
IDRefreshName: resourceName,
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSPinpointEmailChannelDestroy, | tests/r/pinpoint_email_channel: Add ErrorCheck | terraform-providers_terraform-provider-aws | train | go |
f08dd53540350617fb2a63717bf2f000d3af81c1 | diff --git a/lib/puppet/util/rdoc.rb b/lib/puppet/util/rdoc.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/util/rdoc.rb
+++ b/lib/puppet/util/rdoc.rb
@@ -24,9 +24,10 @@ module Puppet::Util::RDoc
# specify our own format & where to output
options = [ "--fmt", "puppet",
- "--quiet",
- "--exclude", "/modules/[^/]*/files/.*\.pp$",
- "--op", outputdir ]
+ "--quiet",
+ "--exclude", "/modules/[^/]*/files/.*$",
+ "--exclude", "/modules/[^/]*/templates/.*$",
+ "--op", outputdir ]
options << "--force-update" if Options::OptionList.options.any? { |o| o[0] == "--force-update" }
options += [ "--charset", charset] if charset | (#<I>) Update puppet doc to ignore files and templates in modules
Without this commit puppet doc will parse some files inside of the
files/ and templates/ sub directory of a module. This is a problem
because these files are not meant to contain puppet code.
Conflicts:
lib/puppet/util/rdoc.rb | puppetlabs_puppet | train | rb |
5c7466b8b68e064e778197b516bb8b24829605a2 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,14 +18,23 @@ tests_require = [
setup(name='nensbuild',
version=version,
- description="TODO",
+ description="One step buildout build.",
long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
- classifiers=[],
+ classifiers=[
+ 'Development Status :: 4 - Beta'
+ 'Environment :: Console',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: BSD License',
+ 'Operating System :: Unix',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Topic :: Software Development :: Build Tools',
+ ],
keywords=[],
author='Roland van Laar',
author_email='[email protected]',
- url='',
+ url='http://github.com/nens/nensbuild',
license='BSD',
packages=['nensbuild'],
include_package_data=True, | Add clasifiers and short description. | nens_nensbuild | train | py |
57acd4c7782b565cc2849776ec9733a80893b16d | diff --git a/fastlane/lib/fastlane/cli_tools_distributor.rb b/fastlane/lib/fastlane/cli_tools_distributor.rb
index <HASH>..<HASH> 100644
--- a/fastlane/lib/fastlane/cli_tools_distributor.rb
+++ b/fastlane/lib/fastlane/cli_tools_distributor.rb
@@ -122,8 +122,6 @@ module Fastlane
UI.important "After creating the Gemfile and Gemfile.lock, commit those files into version control"
end
UI.important "Get started using a Gemfile for fastlane https://docs.fastlane.tools/getting-started/ios/setup/#use-a-gemfile"
-
- sleep 2 # napping is life, otherwise the user might not see this message
end
# Returns an array of symbols for the available lanes for the Fastfile | Don't make Fastlane seem like it's 2 seconds slower than it actually is (#<I>) | fastlane_fastlane | train | rb |
6d14be9d914f9d5cebf66b0916e2d9bc14cf4862 | diff --git a/accessible/workspace-tree.component.js b/accessible/workspace-tree.component.js
index <HASH>..<HASH> 100644
--- a/accessible/workspace-tree.component.js
+++ b/accessible/workspace-tree.component.js
@@ -158,6 +158,8 @@ blocklyApp.WorkspaceTreeComponent = ng.core
// screenreader focus for the destination tree to the block just moved.
var newBlockId = null;
+ this.treeService.clearActiveDesc(this.tree.id);
+
// If the connection is a 'previousConnection' and that connection is
// already joined to something, use the 'nextConnection' of the
// previous block instead in order to do an insertion.
@@ -172,8 +174,9 @@ blocklyApp.WorkspaceTreeComponent = ng.core
// Invoke a digest cycle, so that the DOM settles.
var that = this;
setTimeout(function() {
+ // Move the focus to the current tree.
+ document.getElementById(that.tree.id).focus();
// Move the screenreader focus to the newly-pasted block.
- that.treeService.clearActiveDesc(that.tree.id);
that.treeService.setActiveDesc(newBlockId + 'blockRoot', that.tree.id);
});
}, | Always remove screenreader focus from block before pasting to one of its connections. | LLK_scratch-blocks | train | js |
e95c137c076c79a725892a5f7794b3f5da507341 | diff --git a/lib/websession_webinterface.py b/lib/websession_webinterface.py
index <HASH>..<HASH> 100644
--- a/lib/websession_webinterface.py
+++ b/lib/websession_webinterface.py
@@ -53,7 +53,7 @@ class WebInterfaceYourAccountPages(WebInterfaceDirectory):
_force_https = True
def index(self, req, form):
- redirect_to_url(req, '/youraccount/display')
+ redirect_to_url(req, '%s/youraccount/display' % sweburl)
def display(self, req, form):
args = wash_urlargd(form, {}) | Use absolute URL when calling redirect_to_url(), as the installation may be
done into a subdir. | inveniosoftware_invenio-accounts | train | py |
ec734026cc1ee5c5ffff437cb61fde84371e28c4 | diff --git a/py/apigen/apigen.py b/py/apigen/apigen.py
index <HASH>..<HASH> 100644
--- a/py/apigen/apigen.py
+++ b/py/apigen/apigen.py
@@ -47,7 +47,6 @@ def build(pkgdir, dsa, capture):
apb.build_namespace_pages(ns_data, proj)
capture.err.writeorg('building class pages\n')
apb.build_class_pages(class_data, proj)
- apb.build_method_pages(method_data, proj)
capture.err.writeorg('building function pages\n')
apb.build_function_pages(func_data, proj)
capture.err.writeorg('building source pages\n') | [svn r<I>] Fixed problem probably caused by removing some comment or something...
--HG--
branch : trunk | vmalloc_dessert | train | py |
486ded3fcae5aac100b49e49ef55192053e15807 | diff --git a/testing/acceptance_test.py b/testing/acceptance_test.py
index <HASH>..<HASH> 100644
--- a/testing/acceptance_test.py
+++ b/testing/acceptance_test.py
@@ -808,7 +808,6 @@ class TestDurations(object):
result.stdout.fnmatch_lines_random(
["*durations*", "*call*test_3*", "*call*test_2*"]
)
- assert "test_something" not in result.stdout.str()
result.stdout.fnmatch_lines(
["(0.00 durations hidden. Use -vv to show these durations.)"]
) | Fix flaky durations test
Unfortunately due to fluctuations in runtime "test_something"
might still appear in the final message.
Example failure:
<URL> | pytest-dev_pytest | train | py |
f697364778cbba9db4b13820a2f33b2f36e5c442 | diff --git a/blueflood-core/src/main/java/com/rackspacecloud/blueflood/service/BluefloodServiceStarterException.java b/blueflood-core/src/main/java/com/rackspacecloud/blueflood/service/BluefloodServiceStarterException.java
index <HASH>..<HASH> 100644
--- a/blueflood-core/src/main/java/com/rackspacecloud/blueflood/service/BluefloodServiceStarterException.java
+++ b/blueflood-core/src/main/java/com/rackspacecloud/blueflood/service/BluefloodServiceStarterException.java
@@ -6,6 +6,11 @@ public class BluefloodServiceStarterException extends RuntimeException {
this.status = status;
}
+ public BluefloodServiceStarterException(int status, String message, Throwable cause) {
+ super(message, cause);
+
+ this.status = status;
+ }
int status;
public int getStatus() { | Optionally include a cause of the exception. | rackerlabs_blueflood | train | java |
5fb7299a34cbf75921d390e5f1ea08d2af735bee | diff --git a/ember_debug/data_debug.js b/ember_debug/data_debug.js
index <HASH>..<HASH> 100644
--- a/ember_debug/data_debug.js
+++ b/ember_debug/data_debug.js
@@ -90,10 +90,21 @@ var DataDebug = Ember.Object.extend(PortMixin, {
wrapRecord: function(record) {
var objectId = Ember.guidFor(record.object);
+ var self = this;
+ var columnValues = {};
+ var searchKeywords = [];
this.sentRecords[objectId] = record;
+ // make objects clonable
+ for (var i in record.columnValues) {
+ columnValues[i] = this.get('objectInspector').inspect(record.columnValues[i]);
+ }
+ // make sure keywords can be searched and clonable
+ searchKeywords = Ember.A(record.searchKeywords).filter(function(keyword) {
+ return (typeof keyword === 'string' || typeof keyword === 'number');
+ });
return {
- columnValues: record.columnValues,
- searchKeywords: record.searchKeywords,
+ columnValues: columnValues,
+ searchKeywords: searchKeywords,
filterValues: record.filterValues,
color: record.color,
objectId: objectId | Make sure record data is clonable before sending it
Fixes #<I> | emberjs_ember-inspector | train | js |
4b9c32943bcb8680acebe7885d90d0b85e96e540 | diff --git a/dwm/test/test_copy_value.py b/dwm/test/test_copy_value.py
index <HASH>..<HASH> 100644
--- a/dwm/test/test_copy_value.py
+++ b/dwm/test/test_copy_value.py
@@ -0,0 +1,34 @@
+""" test copy value case of derive function"""
+
+import mongomock
+
+from dwm import Dwm
+
+
+# Setup mongomock db
+DB = mongomock.MongoClient().db
+
+
+# Setup Dwm instance
+FIELDS = {
+ "field1": {
+ "lookup": [],
+ "derive": [
+ {
+ "type": "copyValue",
+ "fieldSet": ["field2"],
+ "options": ["overwrite"]
+ }
+ ]
+ }
+}
+
+DWM = Dwm(name='test', mongo=DB, fields=FIELDS)
+
+
+# Let the testing begin
+def test_derive_copy_value():
+ """ Ensure derive copy value """
+ rec = {"emailAddress": "[email protected]", "field1": "", "field2": "newvalue"}
+ rec_out, _ = DWM._derive(rec, {}) # pylint: disable=W0212
+ assert rec_out['field1'] == 'newvalue' | add tests for DWM class drive method cover copyValue case | rh-marketingops_dwm | train | py |
704b3eefe848a8f5482d3c069132cf33cfca9ed8 | diff --git a/src/cli.js b/src/cli.js
index <HASH>..<HASH> 100644
--- a/src/cli.js
+++ b/src/cli.js
@@ -131,7 +131,7 @@ function coverCmd(opts) {
writeFileSync(file, JSON.stringify(cov), 'utf8');
});
- if (config.instrumentation.preloadSources()) {
+ if (config.instrumentation.includeAllSources()) {
matchFn.files.forEach(function (file) {
if (opts.verbose) { console.error('Preload ' + file); }
try { | fix: support istanbul@<I> `--include-all-sources` option | douglasduteil_isparta | train | js |
fe404e1dcc916e0dcd492b49fff6c7adc4381fad | diff --git a/secureheader.go b/secureheader.go
index <HASH>..<HASH> 100644
--- a/secureheader.go
+++ b/secureheader.go
@@ -105,7 +105,7 @@ func (c *Config) ServeHTTP(w http.ResponseWriter, r *http.Request) {
if c.ContentTypeOptions {
w.Header().Set("X-Content-Type-Options", "nosniff")
}
- if c.HSTS && r.URL.Scheme == "https" {
+ if c.HSTS && c.isHTTPS(r) {
v := "max-age=" + strconv.FormatInt(int64(c.HSTSMaxAge/time.Second), 10)
if c.HSTSIncludeSubdomains {
v += "; includeSubDomains" | properly check for https during HSTS insertion
We want consistent behavior for deciding whether the
current connetion is using https, so we should use the
same logic as the https redirect check. | kr_secureheader | train | go |
db14891e0ac56c546481c3693d3829ce71a353ff | diff --git a/lib/npr/entity/story.rb b/lib/npr/entity/story.rb
index <HASH>..<HASH> 100644
--- a/lib/npr/entity/story.rb
+++ b/lib/npr/entity/story.rb
@@ -86,9 +86,7 @@ module NPR
ATTR_AS_IS = [
"thumbnail",
"parent",
- "container",
- "text",
- "textWithHtml"
+ "container"
]
attr_accessor *ATTR_AS_IS
attr_accessor :id, :text, :textWithHtml | Remove text nodes from deprecated ATTR_AS_IS | bricker_npr | train | rb |
d35ffb156568ed1294b9b9961e1c54f48b36ce3c | diff --git a/test/form/sourcemaps-inline/_config.js b/test/form/sourcemaps-inline/_config.js
index <HASH>..<HASH> 100644
--- a/test/form/sourcemaps-inline/_config.js
+++ b/test/form/sourcemaps-inline/_config.js
@@ -1,5 +1,6 @@
module.exports = {
description: 'correct sourcemaps are written (inline)',
+ skipIfWindows: true,
options: {
sourceMap: 'inline'
}
diff --git a/test/form/sourcemaps/_config.js b/test/form/sourcemaps/_config.js
index <HASH>..<HASH> 100644
--- a/test/form/sourcemaps/_config.js
+++ b/test/form/sourcemaps/_config.js
@@ -1,5 +1,6 @@
module.exports = {
description: 'correct sourcemaps are written (separate file)',
+ skipIfWindows: true,
options: {
sourceMap: true
}
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -221,6 +221,8 @@ describe( 'rollup', function () {
var config = require( FORM + '/' + dir + '/_config' );
+ if ( config.skipIfWindows && process.platform === 'win32' ) return;
+
var options = extend( {}, config.options, {
entry: FORM + '/' + dir + '/main.js'
}); | skip tests on windows, as applicable. closes #<I> | rollup_rollup | train | js,js,js |
6706f5d782c0bf062242088e9944188e9fb515c7 | diff --git a/app/models/glue/pulp/repos.rb b/app/models/glue/pulp/repos.rb
index <HASH>..<HASH> 100644
--- a/app/models/glue/pulp/repos.rb
+++ b/app/models/glue/pulp/repos.rb
@@ -76,9 +76,7 @@ module Glue::Pulp::Repos
# cache repos so we can cache lazy_accessors
@repo_cache ||= {}
- @repo_cache[env.id] ||= Repository.joins(:environment_product).where(
- "environment_products.product_id" => self.id,
- "environment_products.environment_id" => env)
+ @repo_cache[env.id] ||= env.default_content_view.repos_in_product(env, self)
if include_disabled
@repo_cache[env.id] | content views - update product.repos to handle default content view
Minor update to the product.repos so that when invoked, it will
return the repos that are associated with the default content
view. Previously, it would return all repos (including those
that are contained in other published content views) | Katello_katello | train | rb |
b928518952d8df04848844d1a9838a4080b0414e | diff --git a/openquake/engine/db/models.py b/openquake/engine/db/models.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/db/models.py
+++ b/openquake/engine/db/models.py
@@ -217,7 +217,7 @@ def build_curves(rlz, curves_by_trt_model_gsim):
# number of TrtModels
curves = 0
for art in AssocLtRlzTrtModel.objects.filter(rlz=rlz):
- pnes = 1. - curves_by_trt_model_gsim[art.trt_model_id, art.gsim]
+ pnes = 1. - curves_by_trt_model_gsim.get((art.trt_model_id, art.gsim), 0)
curves = 1. - (1. - curves) * pnes
return curves | Fixed bug reported by INGV Pisa
Former-commit-id: fede<I>d<I>f<I>fbd<I>bc<I>ef5dd4d [formerly <I>f<I>b9d<I>ee<I>b6eef7c<I>c5ff]
Former-commit-id: <I>be0bc<I>f<I>efb<I>dab<I>eb<I>d9 | gem_oq-engine | train | py |
dc836bafda9d05b0bb6bc01826dccd1ded58c915 | diff --git a/lib/Github/Api/Organization/Teams.php b/lib/Github/Api/Organization/Teams.php
index <HASH>..<HASH> 100644
--- a/lib/Github/Api/Organization/Teams.php
+++ b/lib/Github/Api/Organization/Teams.php
@@ -83,8 +83,12 @@ class Teams extends AbstractApi
return $this->get('teams/'.rawurlencode($team).'/repos/'.rawurlencode($username).'/'.rawurlencode($repository));
}
- public function addRepository($team, $username, $repository)
+ public function addRepository($team, $username, $repository, array $params)
{
+ if (isset($params['permission']) && !in_array($params['permission'], array('pull', 'push', 'admin'))) {
+ $params['permission'] = 'pull';
+ }
+
return $this->put('teams/'.rawurlencode($team).'/repos/'.rawurlencode($username).'/'.rawurlencode($repository));
} | Allow Adding Repo With Permissions | KnpLabs_php-github-api | train | php |
53f8b3a46d61215b4411339c4f38f23d45eda98c | diff --git a/concrete/src/Form/Service/Form.php b/concrete/src/Form/Service/Form.php
index <HASH>..<HASH> 100644
--- a/concrete/src/Form/Service/Form.php
+++ b/concrete/src/Form/Service/Form.php
@@ -511,10 +511,11 @@ class Form
}
$str .= '</select>';
if ($configuration['linkStateProvinceField']) {
+ $escapedID = preg_replace('/[!"#$%&\'()*+,.\\/:;<=>?@\\[\\]^`{|}~\\\\]/', '\\\\$0', $id);
$r = ResponseAssetGroup::get();
$r->requireAsset('core/country-stateprovince-link');
$str .= '<script>';
- $str .= '$(document).ready(function() { ccmCountryStateprovinceLink.withCountryField($(' . json_encode('#' . $id) . ')); });';
+ $str .= '$(document).ready(function() { ccmCountryStateprovinceLink.withCountryField($(' . json_encode('#' . $escapedID) . ')); });';
$str .= '</script>';
} | Escape the Country field ID for jQuery selector
So that it can contain special characters like [] | concrete5_concrete5 | train | php |
Subsets and Splits