hash
stringlengths 40
40
| diff
stringlengths 131
26.7k
| message
stringlengths 7
694
| project
stringlengths 5
67
| split
stringclasses 1
value | diff_languages
stringlengths 2
24
|
---|---|---|---|---|---|
3e0ddb2b87d92df209147a8bb3f4e2c98bf00a05
|
diff --git a/core/test/regression/migrations/migration_spec.js b/core/test/regression/migrations/migration_spec.js
index <HASH>..<HASH> 100644
--- a/core/test/regression/migrations/migration_spec.js
+++ b/core/test/regression/migrations/migration_spec.js
@@ -275,7 +275,7 @@ describe('Database Migration (special functions)', function () {
result.roles.at(7).get('name').should.eql('Scheduler Integration');
// Permissions
- result.permissions.length.should.eql(71);
+ result.permissions.length.should.eql(72);
result.permissions.toJSON().should.be.CompletePermissions();
});
});
|
Fixed fixtures permission count in migration test
no issue
- We added 2 new permissions(backupContent and publish) to fixtures recently but the count was only increased by 1 in tests
|
TryGhost_Ghost
|
train
|
js
|
5409e5e31b00f05d4e706d2086a3205974a25886
|
diff --git a/activerecord/test/schema/schema.rb b/activerecord/test/schema/schema.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/schema/schema.rb
+++ b/activerecord/test/schema/schema.rb
@@ -814,9 +814,10 @@ ActiveRecord::Schema.define do
t.index :id, unique: true
end
- create_table :subscribers, force: true do |t|
+ create_table :subscribers, id: false, force: true do |t|
t.string :nick, null: false
t.string :name
+ t.integer :id
t.integer :books_count, null: false, default: 0
t.integer :update_count, null: false, default: 0
t.index :nick, unique: true
|
Use nullable `id` column instead of a primary key
`id` column in `subscribers` was added as a primary key for ignorable in
INSERT. But it caused `NotNullViolation` for oracle-enhanced adapter.
<URL>
|
rails_rails
|
train
|
rb
|
1ca226c130595450669cc7bf4db70d2cc7b6877d
|
diff --git a/lib/ajax/block_classes.js b/lib/ajax/block_classes.js
index <HASH>..<HASH> 100644
--- a/lib/ajax/block_classes.js
+++ b/lib/ajax/block_classes.js
@@ -243,18 +243,20 @@ block_class.prototype.toggle_hide = function(e, target, isCosmetic) {
block_class.prototype.delete_button = function() {
-
- //remove from local model
+ // Remove from local model.
if (main.rightcolumn.has_block(this)) {
main.rightcolumn.remove_block(this);
} else if (main.leftcolumn.has_block(this)) {
main.leftcolumn.remove_block(this);
}
- //remove from remote model
- main.connect('DELETE','class=block&instanceId='+this.instanceId);
+ // Remove block from the drag and drop group in YUI.
+ this.removeFromGroup('blocks');
+
+ // Remove from remote model.
+ main.connect('DELETE', 'class=block&instanceId='+this.instanceId);
- //remove from view
- main.blocks[main.get_block_index(this)] = null;
+ // Remove from view
+ main.blocks.splice(main.get_block_index(this), 1);
this.getEl().parentNode.removeChild(this.getEl());
if (this.debug) {
|
Merged from <I>.
|
moodle_moodle
|
train
|
js
|
a9f80ccd1ca9d765be2c1b0ddeb6aea3e2c4bab9
|
diff --git a/test/test_mdf.py b/test/test_mdf.py
index <HASH>..<HASH> 100644
--- a/test/test_mdf.py
+++ b/test/test_mdf.py
@@ -234,8 +234,8 @@ class TestMDF(unittest.TestCase):
self.assertTrue(equal)
- @unittest.skip
- def test_cut_absolute_array(self):
+
+ def _test_cut_absolute_array(self):
print("MDF cut absolute array tests")
for mdfname in os.listdir('tmpdir_array'):
@@ -345,8 +345,7 @@ class TestMDF(unittest.TestCase):
self.assertTrue(equal)
- @unittest.skip
- def test_filter_array(self):
+ def _test_filter_array(self):
print("MDF filter array tests")
for mdfname in os.listdir('tmpdir_array'):
|
don't use @unittest.skip decorator since it fails the travis CI build on python <I>
|
danielhrisca_asammdf
|
train
|
py
|
acb420a8792052507d024bf409b34ef1d2644522
|
diff --git a/nfc/tag/tt1.py b/nfc/tag/tt1.py
index <HASH>..<HASH> 100644
--- a/nfc/tag/tt1.py
+++ b/nfc/tag/tt1.py
@@ -241,7 +241,9 @@ class Type1Tag(object):
@property
def is_present(self):
"""Returns True if the tag is still within communication range."""
- try: return len(self.transceive("\x78\x00\x00"+self.uid)) == 6
+ try:
+ data = self.transceive("\x78\x00\x00"+self.uid)
+ return data and len(data) == 6
except nfc.clf.DigitalProtocolError: return False
def transceive(self, data, timeout=0.1):
|
increased robustness against driver returning None from exchange
|
nfcpy_nfcpy
|
train
|
py
|
e8c1d9231ea53c71072651c781529a6a158b021b
|
diff --git a/hotdoc/parsers/gtk_doc.py b/hotdoc/parsers/gtk_doc.py
index <HASH>..<HASH> 100644
--- a/hotdoc/parsers/gtk_doc.py
+++ b/hotdoc/parsers/gtk_doc.py
@@ -356,9 +356,12 @@ class GtkDocParser:
actual_parameters = OrderedDict({})
for param in parameters:
if is_section:
- if param.name.lower().replace('_', '-') in [
- 'symbols', 'private-symbols', 'auto-sort']:
+ cleaned_up_name = param.name.lower().replace('_', '-')
+ if cleaned_up_name in ['symbols', 'private-symbols', 'auto-sort', 'sources']:
meta.update(self.__parse_yaml_comment(param, filename))
+ if cleaned_up_name == 'sources':
+ sources_paths = [os.path.abspath(os.path.join(os.path.dirname(filename), path)) for path in meta[cleaned_up_name]]
+ meta[cleaned_up_name] = sources_paths
else:
meta[param.name] = param.description
else:
|
gtkparser: Make paths in sources absolute
|
hotdoc_hotdoc
|
train
|
py
|
a90403e17a1671f8a38e58581e9a9199f2a252dd
|
diff --git a/Classes/Domain/Repository/SitemapRepository.php b/Classes/Domain/Repository/SitemapRepository.php
index <HASH>..<HASH> 100644
--- a/Classes/Domain/Repository/SitemapRepository.php
+++ b/Classes/Domain/Repository/SitemapRepository.php
@@ -79,6 +79,9 @@ class SitemapRepository
*/
public function findAllPages()
{
+ if (empty($this->pluginConfig['1']['urlEntries.']['pages'])) {
+ return [];
+ }
$pages = $this->getPages();
$urlEntries = $this->getEntriesFromPages($pages);
|
[BUGFIX] Exclude pages from result if deactivated
Resolves: #4
|
beardcoder_sitemap_generator
|
train
|
php
|
d7c11fce05c1aad1f52623130d595155d7231058
|
diff --git a/packages/openneuro-server/libs/mongo.js b/packages/openneuro-server/libs/mongo.js
index <HASH>..<HASH> 100644
--- a/packages/openneuro-server/libs/mongo.js
+++ b/packages/openneuro-server/libs/mongo.js
@@ -41,6 +41,7 @@ export default {
datasets: null,
snapshots: null,
stars: null,
+ keys: null,
},
scitran: {
projects: null,
|
add "keys" collection to the server-side mongo config
|
OpenNeuroOrg_openneuro
|
train
|
js
|
b77f30f2786d7ed94d60d30b7716bc73170db4b5
|
diff --git a/lib/mailman/router.rb b/lib/mailman/router.rb
index <HASH>..<HASH> 100644
--- a/lib/mailman/router.rb
+++ b/lib/mailman/router.rb
@@ -20,7 +20,7 @@ module Mailman
def initialize
@routes = []
- @params = {}
+ @params = Hash.new { |hash,key| hash[key.to_sym] if key.respond_to?(:to_sym) } # allows indifferent access via string and symbol
end
# Adds a route to the router.
diff --git a/spec/mailman/router_spec.rb b/spec/mailman/router_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mailman/router_spec.rb
+++ b/spec/mailman/router_spec.rb
@@ -33,6 +33,12 @@ describe Mailman::Router do
@router.route('test1')
end
+ it 'should set the params helper to a indifferent hash' do
+ @route1.block = lambda { params[:test].should == 'test'
+ params['test'].should == 'test' }
+ @router.route('test1')
+ end
+
describe 'array' do
before do
|
Add indifferent access to params hash
|
mailman_mailman
|
train
|
rb,rb
|
69b0ce87ceaa060de014a86b0f578f2677b870fe
|
diff --git a/openpnm/utils/petsc.py b/openpnm/utils/petsc.py
index <HASH>..<HASH> 100644
--- a/openpnm/utils/petsc.py
+++ b/openpnm/utils/petsc.py
@@ -11,6 +11,7 @@ from openpnm.core import Base
from openpnm.utils import logging
logger = logging.getLogger(__name__)
import petsc4py
+# Next line must be before importing PETSc
petsc4py.init(sys.argv)
from petsc4py import PETSc
|
Add essential comment to petsc.py regarding the order of imports, ci min
|
PMEAL_OpenPNM
|
train
|
py
|
12d610cb4d411ac6dece2b9f78849232e512d0e2
|
diff --git a/msaf/input_output.py b/msaf/input_output.py
index <HASH>..<HASH> 100644
--- a/msaf/input_output.py
+++ b/msaf/input_output.py
@@ -190,8 +190,8 @@ def find_estimation(jam, boundaries_id, labels_id, params):
ann = jam.search(namespace=namespace).\
search(**{"Sandbox.boundaries_id": boundaries_id}).\
search(**{"Sandbox.labels_id": lambda x:
- isinstance(x, six.string_types) and
- re.match(labels_id, x) is not None})
+ (isinstance(x, six.string_types) and
+ re.match(labels_id, x) is not None) or x is None})
for key, val in zip(params.keys(), params.values()):
if isinstance(val, six.string_types):
ann = ann.search(**{"Sandbox.%s" % key: val})
|
Logic to read JAMS was wrong. Hoping to fix #<I>
Former-commit-id: b<I>b8e1e1fcb4c<I>ddae<I>a2fc3a7e
Former-commit-id: 5f5b3bf<I>a<I>f<I>fdf4d<I>db5b0e6c<I>
|
urinieto_msaf
|
train
|
py
|
9977b93e667b4d989a1f2d2d746597e1593325bb
|
diff --git a/client/login/controller.js b/client/login/controller.js
index <HASH>..<HASH> 100644
--- a/client/login/controller.js
+++ b/client/login/controller.js
@@ -18,7 +18,6 @@ import MagicLogin from './magic-login';
import WPLogin from './wp-login';
import { fetchOAuth2ClientData } from 'state/oauth2-clients/actions';
import { getCurrentUser, getCurrentUserLocale } from 'state/current-user/selectors';
-import { recordTracksEventWithClientId as recordTracksEvent } from 'state/analytics/actions';
const enhanceContextWithLogin = context => {
const { params: { flow, isJetpack, socialService, twoFactorAuthType }, path } = context;
@@ -57,10 +56,6 @@ export function login( context, next ) {
const redirectQueryString = parse( parsedRedirectUrl.query );
if ( client_id !== redirectQueryString.client_id ) {
- context.store.dispatch(
- recordTracksEvent( 'calypso_login_phishing_attempt', context.query )
- );
-
const error = new Error(
'The `redirect_to` query parameter is invalid with the given `client_id`.'
);
|
Remove event that won't be triggered
|
Automattic_wp-calypso
|
train
|
js
|
04b5b38182fa498c79a847fc78648164b81a6bc8
|
diff --git a/safe/engine/core.py b/safe/engine/core.py
index <HASH>..<HASH> 100644
--- a/safe/engine/core.py
+++ b/safe/engine/core.py
@@ -91,7 +91,7 @@ def calculate_impact(layers, impact_fcn,
return F
-def check_data_integrity(layer_objects, tolerance=):
+def check_data_integrity(layer_objects):
"""Check list of layer objects
Input
|
Tolerance in check_data_integrity is not yet configurable
|
inasafe_inasafe
|
train
|
py
|
0f4815e0ece0dabf81b9254f3f0c7666b4acb1a3
|
diff --git a/pyemma/util/tests/test_config.py b/pyemma/util/tests/test_config.py
index <HASH>..<HASH> 100644
--- a/pyemma/util/tests/test_config.py
+++ b/pyemma/util/tests/test_config.py
@@ -6,6 +6,7 @@ Created on 11.06.2015
import warnings
import unittest
import os
+import sys
from pyemma.util.config import readConfiguration
from pyemma.util.files import TemporaryDirectory
@@ -13,6 +14,7 @@ from pyemma.util.files import TemporaryDirectory
class TestConfig(unittest.TestCase):
+ @unittest.skipIf(sys.platform == 'win32', 'unix based test')
def test_can_not_create_cfg_dir(self):
os.environ['HOME'] = '/dev/null'
@@ -28,6 +30,7 @@ class TestConfig(unittest.TestCase):
assert issubclass(w[-1].category, UserWarning)
assert "could not create" in str(w[-1].message)
+ @unittest.skipIf(sys.platform == 'win32', 'unix based test')
def test_non_writeable_cfg_dir(self):
with TemporaryDirectory() as tmp:
|
[util/test_config] run tests not for win<I> platforms.
|
markovmodel_PyEMMA
|
train
|
py
|
3fb68709ee430dd500be31da0d2ddf865adb7cb0
|
diff --git a/src/Dot.php b/src/Dot.php
index <HASH>..<HASH> 100644
--- a/src/Dot.php
+++ b/src/Dot.php
@@ -2,6 +2,8 @@
namespace Enzyme\Freckle;
+use InvalidArgumentException;
+
class Dot
{
public function get($collection, $path)
@@ -11,7 +13,7 @@ class Dot
if (null === $dotter) {
$type = gettype($collection);
- throw new Exception(
+ throw new InvalidArgumentException(
"Freckle doesn't know how to process the " .
"collection of type [{$type}]"
);
|
Refactor thrown Exception to InvalidArgumentException on unsupported collection types.
|
enzyme_freckle
|
train
|
php
|
3a4a39433093dbd6008954128a8a9f52dd9d0143
|
diff --git a/src/scene.js b/src/scene.js
index <HASH>..<HASH> 100755
--- a/src/scene.js
+++ b/src/scene.js
@@ -936,8 +936,6 @@ Scene.prototype.removeTile = function (key) {
tile.remove(this);
}
- this.workers.forEach(x => WorkerBroker.postMessage(x, 'removeTile', key));
-
this.forgetTile(tile.key);
this.dirty = true;
};
diff --git a/src/tile.js b/src/tile.js
index <HASH>..<HASH> 100755
--- a/src/tile.js
+++ b/src/tile.js
@@ -87,10 +87,6 @@ export default class Tile {
}
}
this.meshes = {};
-
- if (this.request != null) {
- this.request.abort();
- }
}
destroy() {
|
Update based on feedback from the Pull Request
|
tangrams_tangram
|
train
|
js,js
|
13baa686fc5af661ef4162f781dcc97e108f2875
|
diff --git a/modules/app/html/progress-notifier.js b/modules/app/html/progress-notifier.js
index <HASH>..<HASH> 100644
--- a/modules/app/html/progress-notifier.js
+++ b/modules/app/html/progress-notifier.js
@@ -35,8 +35,8 @@ exports.create = function (api) {
}
})
- var hidden = computed([progress.incomplete, progress.feeds, queryProgress.pending], (incomplete, feeds, indexing) => {
- return incomplete <= 10 && indexing <= 10 && feeds
+ var hidden = computed([progress.incomplete, queryProgress.pending], (incomplete, indexing) => {
+ return incomplete <= 10 && indexing <= 10
})
var hasDownloadProgress = computed([progress.feeds, progress.incomplete], (feeds, incomplete) => {
@@ -52,7 +52,7 @@ exports.create = function (api) {
['Downloading new messages', h('progress', { style: {'margin-left': '10px'}, min: 0, max: 1, value: downloadProgress })],
when(queryProgress.pending, [
['Indexing database', h('progress', { style: {'margin-left': '10px'}, min: 0, max: 1, value: indexProgress })]
- ], 'Checking for changes...')
+ ], 'Scuttling...')
)
])
])
|
don't display progress for "checking", only indexing and downloading
|
ssbc_patchwork
|
train
|
js
|
7221c776a768c50a3338bf47c8d0fefcd1a43b05
|
diff --git a/client/src/main/java/com/paypal/selion/reports/reporter/runtimereport/JsonRuntimeReporterHelper.java b/client/src/main/java/com/paypal/selion/reports/reporter/runtimereport/JsonRuntimeReporterHelper.java
index <HASH>..<HASH> 100644
--- a/client/src/main/java/com/paypal/selion/reports/reporter/runtimereport/JsonRuntimeReporterHelper.java
+++ b/client/src/main/java/com/paypal/selion/reports/reporter/runtimereport/JsonRuntimeReporterHelper.java
@@ -172,11 +172,6 @@ public class JsonRuntimeReporterHelper {
return;
}
- if (result.getStatus() == ITestResult.SKIP) {
- appendFile(jsonCompletedTest, test1.toJson().concat(",\n"));
- return;
- }
-
for (TestMethodInfo temp : runningTest) {
if (temp.getResult().equals(result)) {
runningTest.remove(temp);
|
Skipped tests need to be removed from running tests
Runtime Reporter: skipped tests need to be removed from running tests
list so that the report doesn't leave skipped tests marked as running.
|
paypal_SeLion
|
train
|
java
|
32f467cf1398811d20e0823cd253f67a09c377fd
|
diff --git a/js/binance.js b/js/binance.js
index <HASH>..<HASH> 100644
--- a/js/binance.js
+++ b/js/binance.js
@@ -830,7 +830,11 @@ module.exports = class binance extends Exchange {
let success = this.safeValue (response, 'success', true);
if (!success) {
if ('msg' in response)
- response = JSON.parse (response['msg']);
+ try {
+ response = JSON.parse (response['msg']);
+ } catch (e) {
+ response = '';
+ }
}
// checks against error codes
let error = this.safeString (response, 'code');
|
sometimes this fails due to binance sending invalid json like ''
|
ccxt_ccxt
|
train
|
js
|
200781c5105140df32b8e18bbec497cc0be5d40e
|
diff --git a/graphyte.py b/graphyte.py
index <HASH>..<HASH> 100644
--- a/graphyte.py
+++ b/graphyte.py
@@ -20,7 +20,7 @@ import time
__all__ = ['Sender', 'init', 'send']
-__version__ = '1.4'
+__version__ = '1.5'
default_sender = None
logger = logging.getLogger(__name__)
|
Bump up version number (with tag support)
|
Jetsetter_graphyte
|
train
|
py
|
3401072368e1fd52908142d2f94e50c9545ba847
|
diff --git a/core/src/main/java/org/acegisecurity/context/SecurityContext.java b/core/src/main/java/org/acegisecurity/context/SecurityContext.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/acegisecurity/context/SecurityContext.java
+++ b/core/src/main/java/org/acegisecurity/context/SecurityContext.java
@@ -17,6 +17,8 @@ package net.sf.acegisecurity.context;
import net.sf.acegisecurity.Authentication;
+import java.io.Serializable;
+
/**
* Interface defining the minimum security information associated with the
@@ -29,7 +31,7 @@ import net.sf.acegisecurity.Authentication;
* @author Ben Alex
* @version $Id$
*/
-public interface SecurityContext {
+public interface SecurityContext extends Serializable {
//~ Methods ================================================================
/**
|
Made Serializable as per acegisecurity-developer list discussion on <I> May <I>.
|
spring-projects_spring-security
|
train
|
java
|
1ad14cc7515e0a14c1d70d5d2bfd5bc0cece0514
|
diff --git a/sfsimodels/__about__.py b/sfsimodels/__about__.py
index <HASH>..<HASH> 100644
--- a/sfsimodels/__about__.py
+++ b/sfsimodels/__about__.py
@@ -1,4 +1,4 @@
__project__ = "sfsimodels"
__author__ = "Maxim Millen"
-__version__ = "0.9.9"
+__version__ = "0.9.10"
__license__ = "MIT"
diff --git a/sfsimodels/models/soils.py b/sfsimodels/models/soils.py
index <HASH>..<HASH> 100644
--- a/sfsimodels/models/soils.py
+++ b/sfsimodels/models/soils.py
@@ -1236,7 +1236,7 @@ class SoilProfile(PhysicalObject):
raise ValueError("Cannot compute vertical effective stress at depth: {0}".format(cum_thickness))
value = sf.get_value_of_a_get_method(sl, fn0, extras={"saturated": saturated,
'v_eff_stress': v_eff})
- elif fn1:
+ elif hasattr(sl, fn1):
value = sf.get_value_of_a_get_method(sl, fn1, extras={"saturated": saturated})
elif hasattr(sl, item):
value = getattr(sl, item)
|
* Fixed bug with soil_profile.split
|
eng-tools_sfsimodels
|
train
|
py,py
|
cc3f2d4ac24fe41d9663f6ccf7c98be5e18f1df9
|
diff --git a/modules_v3/gedcom_favorites/module.php b/modules_v3/gedcom_favorites/module.php
index <HASH>..<HASH> 100644
--- a/modules_v3/gedcom_favorites/module.php
+++ b/modules_v3/gedcom_favorites/module.php
@@ -157,7 +157,7 @@ class gedcom_favorites_WT_Module extends WT_Module implements WT_Module_Block {
$record=WT_GedcomRecord::getInstance($favorite['gid']);
if ($record && $record->canDisplayDetails()) {
if ($record->getType()=='INDI') {
- $content .= "<div id=\"box".$favorite["gid"].".0\" class=\"person_box";
+ $content .= "<div id=\"box".$favorite["gid"].".0\" class=\"person_box action_header";
switch($record->getsex()) {
case 'M':
break;
|
Bug <I> - Additional correction to Mypage favorites for FAB theme
|
fisharebest_webtrees
|
train
|
php
|
098986bcd8627fc35ac2cf43f58c2974412da714
|
diff --git a/src/Entity/Base.php b/src/Entity/Base.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Base.php
+++ b/src/Entity/Base.php
@@ -2,7 +2,7 @@
use GameScan\WoW\WowApiRequest;
-class Base
+abstract class Base
{
/**
@@ -14,4 +14,6 @@ class Base
{
$this->apiRequest = $apiRequest;
}
+
+ abstract public function getRessource();
}
|
base class is now abstract and has a method to get the ressource to grab
|
Game-scan_WoW
|
train
|
php
|
bccb1b920348e646495e30dc385b9a34ab74937f
|
diff --git a/rpcserver.go b/rpcserver.go
index <HASH>..<HASH> 100644
--- a/rpcserver.go
+++ b/rpcserver.go
@@ -382,7 +382,7 @@ func (r *rpcServer) WalletBalance(ctx context.Context,
rpcsLog.Debugf("[walletbalance] balance=%v", balance)
- return &lnrpc.WalletBalanceResponse{float64(balance)}, nil
+ return &lnrpc.WalletBalanceResponse{balance.ToBTC()}, nil
}
// PendingChannels returns a list of all the channels that are currently
|
rpc: ensure wallet balances are returned in units of BTC
|
lightningnetwork_lnd
|
train
|
go
|
7aad713224e34b9debc98df3dec469f2b55c1f5b
|
diff --git a/Spartacus/Database.py b/Spartacus/Database.py
index <HASH>..<HASH> 100644
--- a/Spartacus/Database.py
+++ b/Spartacus/Database.py
@@ -130,6 +130,8 @@ class DataTable(object):
v_diff = []
for c in self.Columns:
if r1[c] != r2[c]:
+ print('HERE 1 {0}, {1}'.format(type(r1[c]), str(r1[c])))
+ print('HERE 2 {0}, {1}'.format(type(r2[c]), str(r2[c])))
v_row.append('{0} --> {1}'.format(r1[c], r2[c]))
v_diff.append(c)
v_allmatch = False
@@ -2559,7 +2561,10 @@ class Oracle(Generic):
self.v_con.close()
self.v_con = None
except cx_Oracle.Error as exc:
- raise Spartacus.Database.Exception(str(exc))
+ if str(exc) != 'DPI-1040: LOB was already closed'
+ raise Spartacus.Database.Exception(str(exc))
+ else:
+ pass
except Exception as exc:
raise Spartacus.Database.Exception(str(exc))
def Cancel(self, p_usesameconn=True):
|
<I>: Fixing bug in data compare
|
wind39_spartacus
|
train
|
py
|
ca388b415048420a71456c65d1f5c5f9fc4ee91a
|
diff --git a/kernel/classes/ezcontentobjectattribute.php b/kernel/classes/ezcontentobjectattribute.php
index <HASH>..<HASH> 100644
--- a/kernel/classes/ezcontentobjectattribute.php
+++ b/kernel/classes/ezcontentobjectattribute.php
@@ -30,6 +30,7 @@ class eZContentObjectAttribute extends eZPersistentObject
$this->InputParameters = false;
$this->HasValidationError = false;
$this->DataTypeCustom = null;
+ $this->DataTypeString = null;
$this->eZPersistentObject( $row );
}
|
Attempt to fix a weird test failure
|
ezsystems_ezpublish-legacy
|
train
|
php
|
ed3ee3dd4e6b1e0544d5b5244a8b44890ce8e299
|
diff --git a/mpu/units/__init__.py b/mpu/units/__init__.py
index <HASH>..<HASH> 100644
--- a/mpu/units/__init__.py
+++ b/mpu/units/__init__.py
@@ -147,7 +147,8 @@ def get_currency(currency_str):
reader = csv.reader(fp, delimiter=',', quotechar='"')
next(reader, None) # skip the headers
for row in reader:
- is_currency = (row[0] == currency_str or row[1] == currency_str or
+ is_currency = (row[0] == currency_str or
+ row[1] == currency_str or
row[2] == currency_str)
if is_currency:
entity = row[0]
|
MINOR: Stylistic change
|
MartinThoma_mpu
|
train
|
py
|
fde66013f5f7570fe27e4785472a7b7ac1c7e495
|
diff --git a/src/Webiny/Component/Entity/EntityCollection.php b/src/Webiny/Component/Entity/EntityCollection.php
index <HASH>..<HASH> 100755
--- a/src/Webiny/Component/Entity/EntityCollection.php
+++ b/src/Webiny/Component/Entity/EntityCollection.php
@@ -69,6 +69,25 @@ class EntityCollection implements \IteratorAggregate, \ArrayAccess
}
/**
+ * Convert EntityCollection to array.<br>
+ * Each EntityAbstract wil be converted to array using $fields and $nestedLevel specified.<br>
+ * If no fields are specified, array will contain all simple and Many2One attributes
+ *
+ * @param string $fields List of fields to extract
+ *
+ * @param int $nestedLevel How many levels to extract (Default: 1, means SELF + 1 level)
+ *
+ * @return array
+ */
+ public function toArray($fields = '', $nestedLevel = 1){
+ $data = [];
+ foreach($this->getIterator() as $entity){
+ $data[] = $entity->toArray($fields, $nestedLevel);
+ }
+ return $data;
+ }
+
+ /**
* Add item to collection
*
* @param EntityAbstract $item
|
EntityCollection now has toArray() method which converts the entire resultset to array ready for sending to browser.
|
Webiny_Framework
|
train
|
php
|
6c507b4138056ccdcf36caad3b964a8ca28a85da
|
diff --git a/ui/src/dashboards/components/DashboardsPageContents.js b/ui/src/dashboards/components/DashboardsPageContents.js
index <HASH>..<HASH> 100644
--- a/ui/src/dashboards/components/DashboardsPageContents.js
+++ b/ui/src/dashboards/components/DashboardsPageContents.js
@@ -26,6 +26,7 @@ class DashboardsPageContents extends Component {
onCreateDashboard,
dashboardLink,
} = this.props
+ const {searchTerm} = this.state
let tableHeader
if (dashboards === null) {
@@ -36,7 +37,7 @@ class DashboardsPageContents extends Component {
tableHeader = `${dashboards.length} Dashboards`
}
const filteredDashboards = dashboards.filter(d =>
- d.name.includes(this.state.searchTerm)
+ d.name.toLowerCase().includes(searchTerm.toLowerCase())
)
return (
|
Make dashboards filter work for lowercase search terms
|
influxdata_influxdb
|
train
|
js
|
013edc4f158fea6c03cb590a25cfc8501a0e6188
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -586,7 +586,8 @@ class GDB extends EventEmitter {
* @returns {Promise<string>} A promise that resolves with the result of command execution.
*/
async execCLI (cmd, thread) {
- return await this._exec(thread ? `thread apply ${thread} ${cmd}` : cmd, 'cli')
+ let res = await this._exec(thread ? `thread apply ${thread} ${cmd}` : cmd, 'cli')
+ return thread ? res.split('\n').slice(2).join('\n') : res
}
/**
|
fix bug with extraneous lines
|
taskcluster_gdb-js
|
train
|
js
|
1a5c9b46693f36a961da3c9e6d84d803683e42db
|
diff --git a/umap/umap_.py b/umap/umap_.py
index <HASH>..<HASH> 100644
--- a/umap/umap_.py
+++ b/umap/umap_.py
@@ -252,6 +252,8 @@ def nearest_neighbors(
distance_func = sparse.sparse_named_distances[metric]
if metric in sparse.sparse_need_n_features:
metric_kwds["n_features"] = X.shape[1]
+ elif callable(metric):
+ distance_func = metric
else:
raise ValueError(
"Metric {} not supported for sparse " + "data".format(metric)
|
Allow passing a callable as a sparse metric
|
lmcinnes_umap
|
train
|
py
|
a57549d0a1d22c695413fb0385a00d9cc93d3039
|
diff --git a/src/Symfony/Component/HttpKernel/Kernel.php b/src/Symfony/Component/HttpKernel/Kernel.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/HttpKernel/Kernel.php
+++ b/src/Symfony/Component/HttpKernel/Kernel.php
@@ -59,12 +59,12 @@ abstract class Kernel implements KernelInterface, TerminableInterface
protected $startTime;
protected $loadClassCache;
- const VERSION = '2.8.43';
- const VERSION_ID = 20843;
+ const VERSION = '2.8.44-DEV';
+ const VERSION_ID = 20844;
const MAJOR_VERSION = 2;
const MINOR_VERSION = 8;
- const RELEASE_VERSION = 43;
- const EXTRA_VERSION = '';
+ const RELEASE_VERSION = 44;
+ const EXTRA_VERSION = 'DEV';
const END_OF_MAINTENANCE = '11/2018';
const END_OF_LIFE = '11/2019';
|
bumped Symfony version to <I>
|
symfony_symfony
|
train
|
php
|
3d5aa175d73110fb511e491eea77a213e1c2b608
|
diff --git a/lib/module_funcs.rb b/lib/module_funcs.rb
index <HASH>..<HASH> 100644
--- a/lib/module_funcs.rb
+++ b/lib/module_funcs.rb
@@ -2,7 +2,7 @@ module OfflineMirror
VERSION_MAJOR = 0
VERSION_MINOR = 1
- @@app_online_flag = true
+ @@app_online_flag = nil
# Used in the environment configuration file to set the app to online or offline mode.
# This should not be called from within the app.
@@ -18,9 +18,9 @@ module OfflineMirror
# Returns true if the app is in online mode (or in other words, this is the main server)
def self.app_online?
case @@app_online_flag
- when true then true
- when false then false
- else raise AppModeUnknownError.new
+ when true then true
+ when false then false
+ else raise AppModeUnknownError.new
end
end
|
App online flag should be nil by default
|
DavidMikeSimon_offroad
|
train
|
rb
|
93153ad116cad05523b2fb7531f22a0445b356d8
|
diff --git a/packages/plugin-collector-files/src/index.js b/packages/plugin-collector-files/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/plugin-collector-files/src/index.js
+++ b/packages/plugin-collector-files/src/index.js
@@ -43,12 +43,20 @@ export function getFields(json: PhenomicTransformResult) {
return keys.filter(key => key !== "author" && key !== "authors");
}
+function isLiteral(value) {
+ const type = typeof value;
+ return value === "string" || value === "number" || value === "boolean";
+}
+
+function isArrayOfLiterals(array) {
+ return Array.isArray(array) && array.every(isLiteral);
+}
+
export function getFieldValue(json: PhenomicTransformResult, key: string) {
- if (Array.isArray(json.data[key])) {
+ if (isArrayOfLiterals(json.data[key])) {
return json.data[key];
}
- const type = typeof json.data[key];
- if (type === "string" || type === "number" || type === "boolean") {
+ if (isLiteral(json.data[key])) {
return [json.data[key]];
}
return [];
|
Handle arrays of non-literals in front-matter
Closes #<I>
|
phenomic_phenomic
|
train
|
js
|
f5dcd6ff3926c6bfabab54025b4c90808a1bf570
|
diff --git a/lib/github_api/api.rb b/lib/github_api/api.rb
index <HASH>..<HASH> 100644
--- a/lib/github_api/api.rb
+++ b/lib/github_api/api.rb
@@ -174,8 +174,8 @@ module Github
end
def _merge_mime_type(resource, params) # :nodoc:
- params['resource'] = resource
- params['mime_type'] = params['mime_type'] || :raw
+# params['resource'] = resource
+# params['mime_type'] = params['mime_type'] || :raw
end
# TODO add to core extensions
|
Comment out custom mime types, needs separate treatment.
|
piotrmurach_github
|
train
|
rb
|
6124a6aedcfa43cfc8f2675a4425e06af8fd4aa0
|
diff --git a/test/adapters/live_test.rb b/test/adapters/live_test.rb
index <HASH>..<HASH> 100644
--- a/test/adapters/live_test.rb
+++ b/test/adapters/live_test.rb
@@ -11,7 +11,7 @@ else
loaded_adapters = Faraday::Adapter.all_loaded_constants
loaded_adapters -= [Faraday::Adapter::ActionDispatch]
# https://github.com/geemus/excon/issues/98
- loaded_adapters -= [Faraday::Adapter::Excon] if "rbx" == RUBY_ENGINE
+ loaded_adapters -= [Faraday::Adapter::Excon] if defined? RUBY_ENGINE and "rbx" == RUBY_ENGINE
loaded_adapters << :default
end
|
RUBY_ENGINE doesn't have to be present
It wasn't found on <I>p<I>
|
lostisland_faraday
|
train
|
rb
|
bb380a5393bafadfcac3cae8e0f0cff91209abb7
|
diff --git a/src/Mapping/Driver/XmlDriver.php b/src/Mapping/Driver/XmlDriver.php
index <HASH>..<HASH> 100644
--- a/src/Mapping/Driver/XmlDriver.php
+++ b/src/Mapping/Driver/XmlDriver.php
@@ -13,7 +13,7 @@ use Doctrine\ODM\PHPCR\Mapping\Driver\XmlDriver as BaseXmlDriver;
*/
class XmlDriver extends BaseXmlDriver
{
- private const DEFAULT_FILE_EXTENSION = '.phpcr.xml';
+ const DEFAULT_FILE_EXTENSION = '.phpcr.xml';
/**
* {@inheritdoc}
diff --git a/src/Mapping/Driver/YamlDriver.php b/src/Mapping/Driver/YamlDriver.php
index <HASH>..<HASH> 100644
--- a/src/Mapping/Driver/YamlDriver.php
+++ b/src/Mapping/Driver/YamlDriver.php
@@ -13,7 +13,7 @@ use Doctrine\ODM\PHPCR\Mapping\Driver\YamlDriver as BaseYamlDriver;
*/
class YamlDriver extends BaseYamlDriver
{
- private const DEFAULT_FILE_EXTENSION = '.phpcr.yml';
+ const DEFAULT_FILE_EXTENSION = '.phpcr.yml';
/**
* {@inheritdoc}
|
revert a code cleanup that caused a regression in #<I>
|
doctrine_DoctrinePHPCRBundle
|
train
|
php,php
|
8bd49ec01f8bb8b83b118e4acb6b2efb9a13ac69
|
diff --git a/lib/file/file_storage.php b/lib/file/file_storage.php
index <HASH>..<HASH> 100644
--- a/lib/file/file_storage.php
+++ b/lib/file/file_storage.php
@@ -710,7 +710,7 @@ class file_storage {
break;
case 'image/png':
- $quality = int($quality);
+ $quality = (int)$quality;
imagepng($img, NULL, $quality, NULL);
break;
|
fixing int() should be (int) in file storage
|
moodle_moodle
|
train
|
php
|
023d59bb13dc626266aeb47cd0c35b5925c5aa59
|
diff --git a/modules/admin/src/resources/js/directives.js b/modules/admin/src/resources/js/directives.js
index <HASH>..<HASH> 100644
--- a/modules/admin/src/resources/js/directives.js
+++ b/modules/admin/src/resources/js/directives.js
@@ -410,6 +410,11 @@
"initvalue": "@initvalue"
},
link: function(scope) {
+
+ if(jQuery.isNumeric(scope.model)){
+ scope.model = typeCastValue(scope.model);
+ }
+
$timeout(function(){
scope.$watch(function() { return scope.model }, function(n, o) {
if (n == undefined || n == null || n == '') {
|
Update directives.js (#<I>)
use typeCastValue in zaaSelect.
|
luyadev_luya
|
train
|
js
|
88d61b5aabc50851e1446a463ababd1f7ea85c17
|
diff --git a/lib/ooor/transport/json_client.rb b/lib/ooor/transport/json_client.rb
index <HASH>..<HASH> 100644
--- a/lib/ooor/transport/json_client.rb
+++ b/lib/ooor/transport/json_client.rb
@@ -25,6 +25,11 @@ module Ooor
end
def oe_request(session_info, url, params, method, *args)
+ if session_info[:req_id]
+ session_info[:req_id] += 1
+ else
+ session_info[:req_id] = 1
+ end
if session_info[:sid] # required on v7 but forbidden in v8
params.merge!({"session_id" => session_info[:session_id]})
end
@@ -32,7 +37,7 @@ module Ooor
req.headers['Cookie'] = session_info[:cookie]
req.url url
req.headers['Content-Type'] = 'application/json'
- req.body = {"jsonrpc"=>"2.0","method"=>"call", "params" => params, "id"=>"r42"}.to_json
+ req.body = {"jsonrpc"=>"2.0","method"=>"call", "params" => params, "id"=>session_info[:req_id]}.to_json
end.body)
if response["error"]
faultCode = response["error"]['data']['fault_code'] || response["error"]['data']['debug']
|
increment request ids like the browser
|
akretion_ooor
|
train
|
rb
|
ac8e4c7d9c3971a9aabb2e514e10949a5345a544
|
diff --git a/rdbtools/memprofiler.py b/rdbtools/memprofiler.py
index <HASH>..<HASH> 100644
--- a/rdbtools/memprofiler.py
+++ b/rdbtools/memprofiler.py
@@ -87,10 +87,10 @@ class PrintAllKeys():
record.bytes, record.encoding, record.size, record.len_largest_element))
else:
heappush(self._heap, (record.bytes, record))
- self._heap = nlargest(int(self._largest), self._heap)
def dump_heap(self):
if self._largest is not None:
+ self._heap = nlargest(int(self._largest), self._heap)
self._largest = None
while self._heap:
|
Fixed nlargest on every iteration
Realized the fallacy of this after a good night's rest. Now running nlargest at the end after loading everything into a heapq. Not ideal as it could cause memory issues, but I was able to successfully parse a <I>m record rdb and grab the top <I> largest keys without any issues.
|
sripathikrishnan_redis-rdb-tools
|
train
|
py
|
fb4200efefe4d0fad2461d1d86379fdb1a2080be
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -2,7 +2,11 @@ var parseUrl = require('url').parse;
var isSecure = function(req) {
if (req.secure) {
return true;
- } else if (req.get('X-Forwarded-Proto').toLowerCase() === 'https') {
+ } else if (
+ typeof req.get('X-Forwarded-Proto') !== 'undefined' &&
+ typeof req.get('X-Forwarded-Proto').toLowerCase !== 'undefined' &&
+ req.get('X-Forwarded-Proto').toLowerCase() === 'https'
+ ) {
return true;
}
return false;
|
Protect against undefined error on header not set
|
battlejj_express-force-ssl
|
train
|
js
|
fbdb3bf2d8f97d059c3f57ae957b2339b5a13c82
|
diff --git a/api/cloudcontroller/ccv3/internal/api_routes.go b/api/cloudcontroller/ccv3/internal/api_routes.go
index <HASH>..<HASH> 100644
--- a/api/cloudcontroller/ccv3/internal/api_routes.go
+++ b/api/cloudcontroller/ccv3/internal/api_routes.go
@@ -22,7 +22,6 @@ const (
GetAppsRequest = "GetApps"
GetBuildRequest = "GetBuild"
GetDropletRequest = "GetDroplet"
- GetProcessInstancesRequest = "GetProcessInstances"
GetIsolationSegmentOrganizationsRequest = "GetIsolationSegmentRelationshipOrganizations"
GetIsolationSegmentRequest = "GetIsolationSegment"
GetIsolationSegmentsRequest = "GetIsolationSegments"
|
Remove duplicate declaration of GetProcessInstancesRequest
|
cloudfoundry_cli
|
train
|
go
|
1c39ab66626cb54b8bd65b094901ea004a352ad2
|
diff --git a/IPython/html/widgets/widget_container.py b/IPython/html/widgets/widget_container.py
index <HASH>..<HASH> 100644
--- a/IPython/html/widgets/widget_container.py
+++ b/IPython/html/widgets/widget_container.py
@@ -36,13 +36,17 @@ class ContainerWidget(DOMWidget):
"""Validate children list.
Makes sure only one instance of any given model can exist in the
- children list."""
+ children list.
+ An excellent post on uniqifiers is available at
+ http://www.peterbe.com/plog/uniqifiers-benchmark
+ which provides the inspiration for using this implementation. Below
+ I've implemented the `f5` algorithm using Python comprehensions."""
if new is not None and isinstance(new, list):
- children = []
- for child in new:
- if child not in children:
- children.append(child)
- self._children = children
+ seen = {}
+ def add_item(i):
+ seen[i.model_id] = True
+ return i
+ return [add_item(i) for i in new if not i.model_id in seen]
class PopupWidget(ContainerWidget):
|
Replace O(N^2) algorithm with a faster one.
|
jupyter-widgets_ipywidgets
|
train
|
py
|
ddd798596714b71e6f4c7afb68a7a04523f42605
|
diff --git a/aversion.py b/aversion.py
index <HASH>..<HASH> 100644
--- a/aversion.py
+++ b/aversion.py
@@ -578,7 +578,8 @@ class AVersion(object):
# Add in information about the formats
for suffix, ctype in self.formats.items():
types.setdefault(ctype, dict(name=ctype, params={}))
- types[ctype]['suffix'] = suffix
+ types[ctype].setdefault('suffix', [])
+ types[ctype]['suffix'].append(suffix)
# Now, build the config dictionary tree we will pass to
# requests
diff --git a/test_aversion.py b/test_aversion.py
index <HASH>..<HASH> 100644
--- a/test_aversion.py
+++ b/test_aversion.py
@@ -608,8 +608,8 @@ class AVersionTest(unittest2.TestCase):
},
},
'types': {
- 'a/a': dict(name='a/a', params={}, suffix='.a'),
- 'a/b': dict(name='a/b', params={}, suffix='.b'),
+ 'a/a': dict(name='a/a', params={}, suffix=['.a']),
+ 'a/b': dict(name='a/b', params={}, suffix=['.b']),
'a/c': dict(name='a/c', params={}),
},
})
|
Allow multiple suffixes for a single type name.
|
klmitch_aversion
|
train
|
py,py
|
aa118cef710fe7accc9fcc892da4abd8b7dbf3f4
|
diff --git a/synapse/tests/test_lib_lmdb.py b/synapse/tests/test_lib_lmdb.py
index <HASH>..<HASH> 100644
--- a/synapse/tests/test_lib_lmdb.py
+++ b/synapse/tests/test_lib_lmdb.py
@@ -30,6 +30,8 @@ class LmdbTest(SynTest):
lenv = lmdb.open(dirn, writemap=True, max_dbs=128)
metr = s_lmdb.Metrics(lenv)
+ self.eq(metr.stat(), {})
+
with lenv.begin(write=True) as xact:
metr.inc(xact, 'woot', 20)
@@ -52,6 +54,8 @@ class LmdbTest(SynTest):
self.len(1, retn)
self.eq(retn[0][1].get('hehe'), 20)
+ self.eq(metr.stat(), {'woot': 40})
+
def test_lmdb_propstor(self):
with self.getTestDir() as dirn:
|
Add LMDB metric stat test
|
vertexproject_synapse
|
train
|
py
|
ebe9f99c9d8f641c6d86805bd72d41dbba3e5a23
|
diff --git a/qtpylib/tools.py b/qtpylib/tools.py
index <HASH>..<HASH> 100644
--- a/qtpylib/tools.py
+++ b/qtpylib/tools.py
@@ -32,8 +32,10 @@ from dateutil.relativedelta import relativedelta, FR
from dateutil.parser import parse as parse_date
from pytz import timezone
-from ezibpy.utils import create_logger # For re-export
-
+# for re-export
+from ezibpy.utils import (
+ createLogger, order_to_dict, contract_to_dict
+)
# =============================================
def chmod(f):
|
expanded ezibpy imports for re-exports
importing createLogger, order_to_dict, contract_to_dict
|
ranaroussi_qtpylib
|
train
|
py
|
9f585d70171b3f137bfb674c8c33ec7f5a023e55
|
diff --git a/src/public-path.js b/src/public-path.js
index <HASH>..<HASH> 100644
--- a/src/public-path.js
+++ b/src/public-path.js
@@ -2,5 +2,5 @@
// Dynamically inject webpack `publicPath`, for resolving assets locations.
// https://webpack.js.org/guides/public-path/#on-the-fly
// https://webpack.js.org/configuration/output/#output-publicpath
-if (process.env.NODE_ENV !== 'test')
+if (process.env.NODE_ENV === 'production')
__webpack_public_path__ = window.app.cdnUrl;
|
chore: use html-webpack plugin
|
commercetools_merchant-center-application-kit
|
train
|
js
|
9c7c75180ab32d71de6bbed7ff468344556ef240
|
diff --git a/libs/babel/babel.js b/libs/babel/babel.js
index <HASH>..<HASH> 100644
--- a/libs/babel/babel.js
+++ b/libs/babel/babel.js
@@ -28,9 +28,9 @@ function culturize (context, culture) {
// don't botch arrays into objects
if (Array.isArray(context)) {
- context.forEach((array_item) => {
- culturize(array_item)
- })
+ for (var i = 0, l = context.length; i < l; i++) {
+ context[i] = culturize(context[i], culture)
+ }
return context
}
|
fix(babel): don't lose culture while translating arrays and objects
- - -
Closes #<I>
|
Gottwik_Enduro
|
train
|
js
|
f6b1e617a5de3ac1613ddc752f8117b418a21986
|
diff --git a/packages/reactabular-tree/src/move-rows.js b/packages/reactabular-tree/src/move-rows.js
index <HASH>..<HASH> 100644
--- a/packages/reactabular-tree/src/move-rows.js
+++ b/packages/reactabular-tree/src/move-rows.js
@@ -33,6 +33,14 @@ const moveTreeRows = ({
[idField]: row[parentField]
});
+ if (index < 0) {
+ console.warn( // eslint-disable-line no-console
+ 'Failed to find the old parent', rows, row, idField, parentField
+ );
+
+ return row;
+ }
+
// Figure out the new id based on that index
const id = movedRows[index][idField];
|
fix(tree) - Warn if `moveRows` failed to find old parent
|
reactabular_reactabular
|
train
|
js
|
c341cf4638c6758c09cfca050b325ee8e211dafd
|
diff --git a/ghost/admin/app/components/gh-members-lab-setting.js b/ghost/admin/app/components/gh-members-lab-setting.js
index <HASH>..<HASH> 100644
--- a/ghost/admin/app/components/gh-members-lab-setting.js
+++ b/ghost/admin/app/components/gh-members-lab-setting.js
@@ -161,9 +161,14 @@ export default Component.extend({
let currentCurrencyComplimentary = stripeProcessor.config.plans.filter(plan => (plan.currency === event.value && plan.name === 'Complimentary'));
if (!currentCurrencyComplimentary.length) {
- let complimentary = stripeProcessor.config.plans.find(plan => (plan.name === 'Complimentary'));
- let newComplimentary = Object.assign({}, complimentary, {currency: event.value});
- stripeProcessor.config.plans.push(newComplimentary);
+ let complimentary = {
+ name: 'Complimentary',
+ currency: event.value,
+ interval: 'year',
+ amount: '0'
+ };
+
+ stripeProcessor.config.plans.push(complimentary);
}
stripeProcessor.config.currency = event.value;
|
🐛 Fixed complimentary plan creation when there was none in plans collection
closes <URL>
|
TryGhost_Ghost
|
train
|
js
|
cbf5dd3f8efc106bcb438203942d781831121071
|
diff --git a/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java b/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java
+++ b/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java
@@ -1,11 +1,13 @@
package org.junit.tests.internal.runners.statements;
-import static java.lang.Long.*;
-import static java.lang.Math.*;
-import static java.lang.System.*;
-import static java.lang.Thread.*;
-import static org.hamcrest.core.Is.*;
-import static org.junit.Assert.*;
+import static java.lang.Long.MAX_VALUE;
+import static java.lang.Math.atan;
+import static java.lang.System.currentTimeMillis;
+import static java.lang.Thread.sleep;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import org.junit.Rule;
import org.junit.Test;
import org.junit.internal.runners.statements.FailOnTimeout;
|
Organized imports according to project's conventions
|
junit-team_junit4
|
train
|
java
|
81462f7c73cbac4cbf4915711cc22846b67e7677
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -62,9 +62,7 @@
var basename = path.basename( file, '.js' );
if( _.contains( MOCHA_LIFECYCLE_METHODS, basename ) ) {
- global[ basename ]( function() {
- requireAbsolute( file );
- } );
+ requireAbsolute( file );
} else {
describe( basename, function() {
requireAbsolute( file );
diff --git a/tests/Test Directory/Before Each/beforeEach.js b/tests/Test Directory/Before Each/beforeEach.js
index <HASH>..<HASH> 100644
--- a/tests/Test Directory/Before Each/beforeEach.js
+++ b/tests/Test Directory/Before Each/beforeEach.js
@@ -5,4 +5,6 @@
* of the MIT license. See the LICENSE file for details.
*/
-global.before_set = true;
+beforeEach( function() {
+ global.before_set = true;
+} );
|
Changed the way mocha-directory interprets lifecycle methods so you can have async lifecycle methods
|
frenchie4111_mocha-directory
|
train
|
js,js
|
c6cf153954bc49abf916d5ba78fba525b62b45b1
|
diff --git a/export.js b/export.js
index <HASH>..<HASH> 100644
--- a/export.js
+++ b/export.js
@@ -44,7 +44,7 @@ function export_sol(paths, state, pure) {
return _.map( env.objects, (obj, name) => {
var type = obj.type.indexOf('[') > -1 ?
obj.type.split('[')[0] : obj.type;
- return ` ${prefix}${groupType != null ? '' : envName + '.'}${name} = ${type}(${obj.value});`;
+ return ` ${prefix}${groupType != null ? '' : envName + '.'}${name} = ${type}(${obj.value.toUpperCase()});`;
})
.concat( "subEnvs" in dtree ? _.map(dtree.subEnvs, (_dtree, pkgName) => {
return genEnvSpec(_dtree, `${prefix.length > 0 ? prefix : envName + '.'}pkg["${pkgName}"].`, env.type);
|
addresses have to be uppercase
|
dapphub_dapple-core
|
train
|
js
|
969be9c50ec47cc1ad2fc8024f78eb144f725cd9
|
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -28,12 +28,12 @@ check_call = partial(subprocess.check_call, env=SUBPROCESS_ENV)
check_output = partial(subprocess.check_output, env=SUBPROCESS_ENV)
xfail_if_no_git = pytest.mark.xfail(
- call(["git", "--help"], shell=True) != 1,
+ call(["git", "help"]) != 0,
reason="git is not installed"
)
xfail_if_no_hg = pytest.mark.xfail(
- call(["hg", "--help"], shell=True) != 0,
+ call(["hg", "help"]) != 0,
reason="hg is not installed"
)
|
simplify vcs xfail detection
|
peritus_bumpversion
|
train
|
py
|
7deb66a193b6a33d96bd665d313b38a4f1b0b685
|
diff --git a/JavaScript/packages/datatypes-date-time/index.js b/JavaScript/packages/datatypes-date-time/index.js
index <HASH>..<HASH> 100644
--- a/JavaScript/packages/datatypes-date-time/index.js
+++ b/JavaScript/packages/datatypes-date-time/index.js
@@ -5,6 +5,7 @@ module.exports = {
TimexProperty: require('./src/timexProperty.js').TimexProperty,
TimexSet: require('./src/timexSet.js').TimexSet,
creator: require('./src/timexCreator.js'),
- resolver: require('./src/timexRangeResolver.js')
+ resolver: require('./src/timexRangeResolver.js'),
+ valueResolver: require('./src/timexResolver.js')
};
|
expose valueResolver through index.js (#<I>)
|
Microsoft_Recognizers-Text
|
train
|
js
|
a7917b83d264358d4af57da3831b81479b46ae4f
|
diff --git a/owasp-security-logging-logback/src/main/java/org/owasp/security/logging/layout/cef/CEFLoggingLayout.java b/owasp-security-logging-logback/src/main/java/org/owasp/security/logging/layout/cef/CEFLoggingLayout.java
index <HASH>..<HASH> 100644
--- a/owasp-security-logging-logback/src/main/java/org/owasp/security/logging/layout/cef/CEFLoggingLayout.java
+++ b/owasp-security-logging-logback/src/main/java/org/owasp/security/logging/layout/cef/CEFLoggingLayout.java
@@ -5,7 +5,6 @@ import ch.qos.logback.core.LayoutBase;
public class CEFLoggingLayout extends LayoutBase<ILoggingEvent> {
- @Override
public String doLayout(ILoggingEvent event) {
Prefix prefix = new Prefix();
prefix.name = event.getMessage();
|
Remove erroneous @Override notation
|
javabeanz_owasp-security-logging
|
train
|
java
|
fea91c6cd6c4af0676f2cdda96c3547bedefa576
|
diff --git a/rocks/RocksDatabase.java b/rocks/RocksDatabase.java
index <HASH>..<HASH> 100644
--- a/rocks/RocksDatabase.java
+++ b/rocks/RocksDatabase.java
@@ -153,7 +153,7 @@ public class RocksDatabase implements TypeDB.Database {
}
}
- RocksSession createAndOpenSession(Arguments.Session.Type type, Options.Session options) {
+ public RocksSession createAndOpenSession(Arguments.Session.Type type, Options.Session options) {
if (!isOpen.get()) throw TypeDBException.of(DATABASE_CLOSED, name);
long lock = 0;
diff --git a/server/TypeDBService.java b/server/TypeDBService.java
index <HASH>..<HASH> 100644
--- a/server/TypeDBService.java
+++ b/server/TypeDBService.java
@@ -59,7 +59,7 @@ public class TypeDBService extends TypeDBGrpc.TypeDBImplBase {
private static final Logger LOG = LoggerFactory.getLogger(TypeDBService.class);
- private final TypeDB typedb;
+ protected final TypeDB typedb;
private final ConcurrentMap<UUID, SessionService> sessionServices;
public TypeDBService(TypeDB typedb) {
|
Make session creation method public, and typedb object protected for TypeDB Cluster
|
graknlabs_grakn
|
train
|
java,java
|
7958d7b841b7260a0ded3ab5d1dbc5506d0fa50f
|
diff --git a/peri/comp/comp.py b/peri/comp/comp.py
index <HASH>..<HASH> 100644
--- a/peri/comp/comp.py
+++ b/peri/comp/comp.py
@@ -405,7 +405,7 @@ class ComponentCollection(Component):
if diff:
for c in comps:
- c.update(param, vals[0])
+ c.set_values(param, vals[0])
for param, comps in self.lmap.iteritems():
if isinstance(comps, list) and len(comps) > 1:
|
sync_params: can't update before initialization, change to set_values
|
peri-source_peri
|
train
|
py
|
fe1389124e16123ecda80c92a376306aa847e5c9
|
diff --git a/pympi/Elan.py b/pympi/Elan.py
index <HASH>..<HASH> 100644
--- a/pympi/Elan.py
+++ b/pympi/Elan.py
@@ -1681,8 +1681,12 @@ def to_eaf(file_path, eaf_obj, pretty=True):
if pretty:
indent(ADOCUMENT)
if file_path == '-':
- file_path = sys.stdout
- elif os.access(file_path, os.F_OK):
- os.rename(file_path, '{}.bak'.format(file_path))
- etree.ElementTree(ADOCUMENT).write(
- file_path, xml_declaration=True, encoding='UTF-8')
+ try:
+ sys.stdout.write(etree.tostring(ADOCUMENT, encoding='unicode'))
+ except LookupError:
+ sys.stdout.write(etree.tostring(ADOCUMENT, encoding='UTF-8'))
+ else:
+ if os.access(file_path, os.F_OK):
+ os.rename(file_path, '{}.bak'.format(file_path))
+ etree.ElementTree(ADOCUMENT).write(
+ file_path, xml_declaration=True, encoding='UTF-8')
|
fix issue #<I>, a bit ugly but it works
|
dopefishh_pympi
|
train
|
py
|
ea74a3efaaae2b99fda088003c302194cdc28cbd
|
diff --git a/src/main/java/com/mebigfatguy/fbcontrib/detect/SuspiciousJDKVersionUse.java b/src/main/java/com/mebigfatguy/fbcontrib/detect/SuspiciousJDKVersionUse.java
index <HASH>..<HASH> 100755
--- a/src/main/java/com/mebigfatguy/fbcontrib/detect/SuspiciousJDKVersionUse.java
+++ b/src/main/java/com/mebigfatguy/fbcontrib/detect/SuspiciousJDKVersionUse.java
@@ -277,6 +277,10 @@ public class SuspiciousJDKVersionUse extends BytecodeScanningDetector {
return false;
}
+ if (className.startsWith("javax/xml/")) {
+ return true;
+ }
+
int lastSlashPos = className.lastIndexOf('/');
String packageName = className.substring(0, lastSlashPos);
ZipEntry ze = jdkZip.getEntry(packageName);
|
javax.xml is too amorphous to check, just assume it's external
|
mebigfatguy_fb-contrib
|
train
|
java
|
49f73db1f421a59aca391288441e673d74645c4f
|
diff --git a/indra/sources/eidos/eidos_reader.py b/indra/sources/eidos/eidos_reader.py
index <HASH>..<HASH> 100644
--- a/indra/sources/eidos/eidos_reader.py
+++ b/indra/sources/eidos/eidos_reader.py
@@ -69,7 +69,7 @@ class EidosReader(object):
eidos = autoclass(eidos_package + '.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
- annot_doc = self.eidos_reader.extractFromText(text, False)
+ annot_doc = self.eidos_reader.extractFromText(text, False, False)
if format == 'json':
mentions = annot_doc.odinMentions()
ser = autoclass(eidos_package + '.serialization.json.WMJSONSerializer')
|
Update call to extraction with new extra arg
|
sorgerlab_indra
|
train
|
py
|
c7b8b2ee07c019389af61f81c939af7f2c146814
|
diff --git a/src/pytest_cov/engine.py b/src/pytest_cov/engine.py
index <HASH>..<HASH> 100644
--- a/src/pytest_cov/engine.py
+++ b/src/pytest_cov/engine.py
@@ -115,7 +115,10 @@ class Central(CovController):
self.cov = coverage.coverage(source=self.cov_source,
config_file=self.cov_config)
- self.cov.erase()
+ if self.config.option.append_coverage:
+ self.cov.load()
+ else:
+ self.cov.erase()
self.cov.start()
self.set_env()
diff --git a/src/pytest_cov/plugin.py b/src/pytest_cov/plugin.py
index <HASH>..<HASH> 100644
--- a/src/pytest_cov/plugin.py
+++ b/src/pytest_cov/plugin.py
@@ -36,6 +36,10 @@ def pytest_addoption(parser):
'default: False')
group.addoption('--cov-fail-under', action='store', metavar='MIN', type='int',
help='Fail if the total coverage is less than MIN.')
+ group.addoption('--append-cov', action='store_true', default=False,
+ dest='append_coverage',
+ help='do not delete coverage but append to current, '
+ 'default: False')
@pytest.mark.tryfirst
|
add option to append coverage instead of deleting the .coverage file fixes #<I>
|
pytest-dev_pytest-cov
|
train
|
py,py
|
632305d847f71adeeeacfbeadeb2c480dffdabdd
|
diff --git a/pymc3/model.py b/pymc3/model.py
index <HASH>..<HASH> 100644
--- a/pymc3/model.py
+++ b/pymc3/model.py
@@ -312,18 +312,15 @@ class ContextMeta(type):
"""Return the most recently pushed context object of type ``cls``
on the stack, or ``None``. If ``error_if_none`` is True (default),
raise a ``TypeError`` instead of returning ``None``."""
- idx = -1
- while True:
- try:
- candidate = cls.get_contexts()[idx] # type: Optional[T]
- except IndexError as e:
- # Calling code expects to get a TypeError if the entity
- # is unfound, and there's too much to fix.
- if error_if_none:
- raise TypeError("No %s on context stack" % str(cls))
- return None
- return candidate
- idx = idx - 1
+ try:
+ candidate = cls.get_contexts()[-1] # type: Optional[T]
+ except IndexError as e:
+ # Calling code expects to get a TypeError if the entity
+ # is unfound, and there's too much to fix.
+ if error_if_none:
+ raise TypeError("No %s on context stack" % str(cls))
+ return None
+ return candidate
def get_contexts(cls) -> List[T]:
"""Return a stack of context instances for the ``context_class``
|
Remove dead code in get_context. (#<I>)
Fix for issue #<I>.
|
pymc-devs_pymc
|
train
|
py
|
67035e75f58e66670b74d7eae1c52f5c9babc105
|
diff --git a/ddocs.go b/ddocs.go
index <HASH>..<HASH> 100644
--- a/ddocs.go
+++ b/ddocs.go
@@ -40,6 +40,11 @@ func (b *Bucket) GetDDocs() (DDocsResult, error) {
uri := b.DDocs.URI
b.RUnlock()
+ // MB-23555 ephemeral buckets have no ddocs
+ if uri == "" {
+ return DDocsResult{}, nil
+ }
+
err := pool.client.parseURLResponse(uri, &ddocsResult)
if err != nil {
return DDocsResult{}, err
@@ -62,6 +67,11 @@ func (b *Bucket) GetDDocsWithRetry() (DDocsResult, error) {
uri := b.DDocs.URI
b.RUnlock()
+ // MB-23555 ephemeral buckets have no ddocs
+ if uri == "" {
+ return DDocsResult{}, nil
+ }
+
err := b.parseURLResponse(uri, &ddocsResult)
if err != nil {
return DDocsResult{}, err
|
MB-<I> shouldn't try to access ddocs for ephemeral buckets
there's no ddocs uri for ephemeral buckets - accessing returns errors.
just return empty ddocs instead.
Change-Id: I<I>d<I>b<I>f<I>dce7da3d<I>c<I>e4
Reviewed-on: <URL>
|
couchbase_go-couchbase
|
train
|
go
|
4b90c61a67684e348f6a112ab1698e19390679bc
|
diff --git a/lib/travis/decrypt_config.rb b/lib/travis/decrypt_config.rb
index <HASH>..<HASH> 100644
--- a/lib/travis/decrypt_config.rb
+++ b/lib/travis/decrypt_config.rb
@@ -12,6 +12,7 @@ module Travis
end
def run(config, r=false)
+ return decrypt(config) if config.is_a?(String)
return config unless config.respond_to?(:inject)
config.inject(config.class.new) do |result, element|
key, element = element if result.is_a?(Hash)
|
mri doesn't respond to inject on strings
|
travis-ci_travis-core
|
train
|
rb
|
cfa9ebea5671f150072d4e196959b6f52f5bc852
|
diff --git a/plotnine/tests/test_theme.py b/plotnine/tests/test_theme.py
index <HASH>..<HASH> 100644
--- a/plotnine/tests/test_theme.py
+++ b/plotnine/tests/test_theme.py
@@ -175,7 +175,7 @@ class TestThemes:
assert p + _theme == 'theme_minimal'
def test_theme_tufte(self):
- p = self.g + labs(title='Theme Tufte') + theme_tufte()
+ p = self.g + labs(title='Theme Tufte') + theme_tufte(ticks=False)
assert p + _theme == 'theme_tufte'
|
TST: Fix failing test for theme_tufte
|
has2k1_plotnine
|
train
|
py
|
24581d47893d56badfe04ab434021670438a7eb5
|
diff --git a/assets/kindeditor/kindeditor-all.js b/assets/kindeditor/kindeditor-all.js
index <HASH>..<HASH> 100644
--- a/assets/kindeditor/kindeditor-all.js
+++ b/assets/kindeditor/kindeditor-all.js
@@ -590,7 +590,7 @@ function _ready(fn) {
_bind(window, 'load', readyFunc);
}
if (_IE) {
- window.attachEvent('onunload', function() {
+ window[window.attachEvent ? 'attachEvent' : 'addEventListener']('onunload', function() {
_each(_eventData, function(key, events) {
if (events.el) {
_unbind(events.el);
|
* fix error of kindeditor in ie8 and edge.
|
easysoft_zui
|
train
|
js
|
4d4ca491ed94d71100ecf610c280aa17e4ceef0c
|
diff --git a/niworkflows/interfaces/utils.py b/niworkflows/interfaces/utils.py
index <HASH>..<HASH> 100644
--- a/niworkflows/interfaces/utils.py
+++ b/niworkflows/interfaces/utils.py
@@ -718,8 +718,7 @@ class DictMergeInputSpec(BaseInterfaceInputSpec):
class DictMergeOutputSpec(TraitedSpec):
- out_dict = traits.Either(traits.Dict, traits.Instance(OrderedDict),
- desc='Merged dictionary')
+ out_dict = traits.Dict(desc='Merged dictionary')
class DictMerge(SimpleInterface):
|
(fix) MergeDict output is currently dict as per @oesteban
|
poldracklab_niworkflows
|
train
|
py
|
78b35e657c3fc567d85127239102511ebf58ee22
|
diff --git a/src/Illuminate/Database/Connection.php b/src/Illuminate/Database/Connection.php
index <HASH>..<HASH> 100755
--- a/src/Illuminate/Database/Connection.php
+++ b/src/Illuminate/Database/Connection.php
@@ -350,7 +350,7 @@ class Connection implements ConnectionInterface
});
}
- /*
+ /**
* Run a select statement against the database and returns a generator.
*
* @param string $query
diff --git a/src/Illuminate/Session/DatabaseSessionHandler.php b/src/Illuminate/Session/DatabaseSessionHandler.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Session/DatabaseSessionHandler.php
+++ b/src/Illuminate/Session/DatabaseSessionHandler.php
@@ -24,7 +24,7 @@ class DatabaseSessionHandler implements SessionHandlerInterface, ExistenceAwareI
*/
protected $table;
- /*
+ /**
* The number of minutes the session should be valid.
*
* @var int
|
Updated docs to PHPDocs
|
laravel_framework
|
train
|
php,php
|
e3203a5334a3dc95160f45f3ca3aaddc971d0b1f
|
diff --git a/dsmr_parser/clients/telegram_buffer.py b/dsmr_parser/clients/telegram_buffer.py
index <HASH>..<HASH> 100644
--- a/dsmr_parser/clients/telegram_buffer.py
+++ b/dsmr_parser/clients/telegram_buffer.py
@@ -51,7 +51,7 @@ class TelegramBuffer(object):
# - The checksum is optional '{0,4}' because not all telegram versions
# support it.
return re.findall(
- r'\/[^\/]+?\![A-F0-9]{0,4}\r\n',
+ r'\/[^\/]+?\![A-F0-9]{0,4}\0?\r\n',
self._buffer,
re.DOTALL
)
|
Optional NUL after checksum
My smart meter returns <I> 0D 0A after the checksum, not just 0D 0A.
|
ndokter_dsmr_parser
|
train
|
py
|
9236d1aa611918129a86d5510584e97ca07ef89f
|
diff --git a/src/phpDocumentor/Transformer/ServiceProvider.php b/src/phpDocumentor/Transformer/ServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/phpDocumentor/Transformer/ServiceProvider.php
+++ b/src/phpDocumentor/Transformer/ServiceProvider.php
@@ -47,7 +47,7 @@ class ServiceProvider extends \stdClass implements ServiceProviderInterface
$templateDir = __DIR__ . '/../../../data/templates';
// vendored installation
- if (!file_exists($templateDir . '/abstract')) {
+ if (!file_exists($templateDir)) {
$templateDir = __DIR__ . '/../../../../templates';
}
|
Remove unnecessary check; can only lead to errors
|
phpDocumentor_phpDocumentor2
|
train
|
php
|
bd084710f1506b0b9435570d44e43fa3724b58d6
|
diff --git a/gcloud/datastore/demo/__init__.py b/gcloud/datastore/demo/__init__.py
index <HASH>..<HASH> 100644
--- a/gcloud/datastore/demo/__init__.py
+++ b/gcloud/datastore/demo/__init__.py
@@ -10,5 +10,5 @@ DATASET_ID = 'gcloud-datastore-demo'
PRIVATE_KEY_PATH = os.path.join(os.path.dirname(__file__), 'demo.key')
-def get_dataset():
+def get_dataset(): #pragma NO COVER
return datastore.get_dataset(DATASET_ID, CLIENT_EMAIL, PRIVATE_KEY_PATH)
|
Don't try covering 'demo' subpackage.
|
googleapis_google-cloud-python
|
train
|
py
|
0c81ac0eea52a9c364ab6c384313e41a5748d597
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='jupyterhub-tmpauthenticator',
- version='0.1',
+ version='0.2',
description='JupyterHub authenticator that hands out temporary accounts for everyone',
url='https://github.com/yuvipanda/jupyterhub-tmpauthenticator',
author='Yuvi Panda',
|
Version bump
Turns out the wheels for <I> included an old version of dummyauthenticator
(which I used as a base for this) because I had an unclean workspace!
This just gives a clean wheel
|
jupyterhub_tmpauthenticator
|
train
|
py
|
6b412b510425c862d17bb24285757e169ba1fbce
|
diff --git a/wpull/document.py b/wpull/document.py
index <HASH>..<HASH> 100644
--- a/wpull/document.py
+++ b/wpull/document.py
@@ -100,13 +100,15 @@ class HTMLReader(BaseDocumentReader):
@classmethod
def is_html_url_info(cls, url_info):
'''Return whether the URLInfo is likely to be a HTML.'''
- if '.htm' in url_info.path.lower():
+ path = url_info.path.lower()
+ if '.htm' in path or '.dhtm' in path:
return True
@classmethod
def is_html_file(cls, file):
'''Return whether the file is likely to be HTML.'''
- peeked_data = wpull.util.peek_file(file).replace(b'\x00', b'').lower()
+ peeked_data = wpull.util.printable_bytes(
+ wpull.util.peek_file(file)).lower()
if b'<!doctype html' in peeked_data \
or b'<head' in peeked_data \
|
document.py: Uses printable_bytes(). Checks also for .dhtml files.
|
ArchiveTeam_wpull
|
train
|
py
|
0d67352a04c9195d5e77b171a553e9b96fb6c5a1
|
diff --git a/test/twin/setup_test.rb b/test/twin/setup_test.rb
index <HASH>..<HASH> 100644
--- a/test/twin/setup_test.rb
+++ b/test/twin/setup_test.rb
@@ -12,15 +12,15 @@ class TwinSetupTest < MiniTest::Spec
class Album < Disposable::Twin
property :id
property :name
- collection :songs, twin: lambda { |*| Song }
- property :artist, twin: lambda { |*| Artist }
+ collection :songs, twin: lambda { |*| Twin::Song }
+ property :artist, twin: lambda { |*| Twin::Artist }
include Setup
end
- class Song < Disposable::Twin
+ class Song_ < Disposable::Twin
property :id
- property :composer, twin: lambda { |*| Artist }
+ property :composer, twin: lambda { |*| Twin::Artist }
include Setup
end
|
[ci skip] make test break, because it was wrong.
|
apotonick_disposable
|
train
|
rb
|
232449b1134156488c30ec555fbba88bc85a6abf
|
diff --git a/cmd/rqlited/main.go b/cmd/rqlited/main.go
index <HASH>..<HASH> 100644
--- a/cmd/rqlited/main.go
+++ b/cmd/rqlited/main.go
@@ -85,7 +85,7 @@ const desc = `rqlite is a lightweight, distributed relational database, which us
storage engine. It provides an easy-to-use, fault-tolerant store for relational data.`
func init() {
- flag.StringVar(&nodeID, "node-id", "", "Unique name for node. If not set, set to hostname")
+ flag.StringVar(&nodeID, "node-id", "", "Unique name for node. If not set, set to Raft address")
flag.StringVar(&httpAddr, "http-addr", "localhost:4001", "HTTP server bind address. For HTTPS, set X.509 cert and key")
flag.StringVar(&httpAdv, "http-adv-addr", "", "Advertised HTTP address. If not set, same as HTTP server")
flag.StringVar(&joinSrcIP, "join-source-ip", "", "Set source IP address during Join request")
|
Correct help text for -node-id
|
rqlite_rqlite
|
train
|
go
|
0fdd7fd03ccd154509f808ee56d268b85d5d48f5
|
diff --git a/src/PlaygroundUser/Module.php b/src/PlaygroundUser/Module.php
index <HASH>..<HASH> 100755
--- a/src/PlaygroundUser/Module.php
+++ b/src/PlaygroundUser/Module.php
@@ -103,7 +103,8 @@ class Module
} else {
$anonymousId = uniqid('pg_', true);
}
- $cookie = new \Zend\Http\Header\SetCookie('pg_anonymous', $anonymousId, time() + 60*60*24*365,'/');
+ // Set the cookie as long as possible (limited by integer max in 32 bits
+ $cookie = new \Zend\Http\Header\SetCookie('pg_anonymous', $anonymousId, 2147483647,'/');
$e->getResponse()->getHeaders()->addHeader($cookie);
}
}
|
fixing cookie length in <I> bits
|
gregorybesson_PlaygroundUser
|
train
|
php
|
f42be4ca1a7de702edf539bf7b58f9e09b545554
|
diff --git a/lib/snowplow-tracker/version.rb b/lib/snowplow-tracker/version.rb
index <HASH>..<HASH> 100644
--- a/lib/snowplow-tracker/version.rb
+++ b/lib/snowplow-tracker/version.rb
@@ -14,6 +14,6 @@
# License:: Apache License Version 2.0
module SnowplowTracker
- VERSION = '0.5.0'
+ VERSION = '0.5.1'
TRACKER_VERSION = "rb-#{VERSION}"
end
|
Bumped version to <I>
|
snowplow_snowplow-ruby-tracker
|
train
|
rb
|
ab7ea765934703b26cdb791fe7ca5c75d93c7f19
|
diff --git a/source/script/main.js b/source/script/main.js
index <HASH>..<HASH> 100644
--- a/source/script/main.js
+++ b/source/script/main.js
@@ -13,10 +13,12 @@
'&': '&',
'<': '<',
'>': '>',
+ '"': '"',
+ "'": '''
}
function escapeHtml(string) {
- return String(string).replace(/[&<>]/g, function(s) {
+ return String(string).replace(/[&<>'"]/g, function(s) {
return escapeEntityMap[s];
});
}
|
Have 'escapeHtml' also escape quotes, not just <, >, &.
|
meteor_meteor-theme-hexo
|
train
|
js
|
b5d35c7e092f0518415f47cc86d8cf56171100a5
|
diff --git a/cmd/metrics-v2.go b/cmd/metrics-v2.go
index <HASH>..<HASH> 100644
--- a/cmd/metrics-v2.go
+++ b/cmd/metrics-v2.go
@@ -27,6 +27,7 @@ import (
"sync/atomic"
"time"
+ "github.com/minio/madmin-go"
"github.com/minio/minio/internal/bucket/lifecycle"
"github.com/minio/minio/internal/logger"
"github.com/prometheus/client_golang/prometheus"
@@ -1614,9 +1615,15 @@ func getLocalDiskStorageMetrics() *MetricsGroup {
return
}
- metrics = make([]Metric, 0, 50)
storageInfo, _ := objLayer.LocalStorageInfo(ctx)
+ if storageInfo.Backend.Type == madmin.FS {
+ return
+ }
+ metrics = make([]Metric, 0, 50)
for _, disk := range storageInfo.Disks {
+ if disk.Metrics == nil {
+ continue
+ }
for apiName, latency := range disk.Metrics.APILatencies {
val := latency.(uint64)
metrics = append(metrics, Metric{
|
ignore disk metrics for single drive mode (#<I>)
fixes #<I>
|
minio_minio
|
train
|
go
|
28c813aabbe5206f4d8dffe99f9cbf108ed69c01
|
diff --git a/pdblp/_version.py b/pdblp/_version.py
index <HASH>..<HASH> 100644
--- a/pdblp/_version.py
+++ b/pdblp/_version.py
@@ -1 +1 @@
-__version__ = "0.1.4"
+__version__ = "0.1.5"
|
Update version to <I> for upcoming release
|
matthewgilbert_pdblp
|
train
|
py
|
c4be86b6514219b60caa5cf271efac0ea103567d
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -68,7 +68,8 @@ function main(opts) {
formatStatusCode: main.normalizeStatusCode,
metricType: 'histogram',
promClient: {},
- promRegistry: promClient.register
+ promRegistry: promClient.register,
+ metricsApp: null,
}, opts
);
@@ -204,6 +205,13 @@ function main(opts) {
next();
};
+ if (opts.metricsApp) {
+ opts.metricsApp.get(opts.metricsPath || '/metrics', async (req, res, next) => {
+ res.set('Content-Type', opts.promRegistry.contentType);
+ return res.end(await opts.promRegistry.metrics());
+ });
+ }
+
middleware.metrics = metrics;
middleware.promClient = promClient;
middleware.metricsMiddleware = metricsMiddleware;
|
Add metricsApp option
Lets you supply an additional express app on which to mount the metrics
endpoint
|
jochen-schweizer_express-prom-bundle
|
train
|
js
|
bda9b240b62869f303a8b58cdc467487250d8e6f
|
diff --git a/lib/env.js b/lib/env.js
index <HASH>..<HASH> 100644
--- a/lib/env.js
+++ b/lib/env.js
@@ -4,9 +4,15 @@ const jessy = require('jessy');
const keys = require('./keys');
module.exports.path = (pathEnv, delimiter, cwdEnv, sep) => {
- const result = pathEnv + delimiter + cwdEnv + sep + 'node_modules/.bin';
-
- return result;
+ return [
+ cwdEnv,
+ sep,
+ 'node_modules',
+ sep,
+ '.bin',
+ delimiter,
+ pathEnv
+ ].join('');
};
module.exports.config = (config) => {
diff --git a/test/env.js b/test/env.js
index <HASH>..<HASH> 100644
--- a/test/env.js
+++ b/test/env.js
@@ -6,7 +6,7 @@ let env = require('../lib/env');
test('env: $PATH', (t) => {
let path = env.path('hello:world', ':', 'home', '/');
- t.equal(path, 'hello:world:home/node_modules/.bin', 'should build PATH');
+ t.equal(path, 'home/node_modules/.bin:hello:world', 'should build PATH');
t.end();
});
|
fix(env) globally installed modules first then node_modules/.bin -> node_modules/.bin and then global
|
coderaiser_redrun
|
train
|
js,js
|
30ace3c6477b0d93ba22031cbb4b20b05c2559d5
|
diff --git a/Gruntfile.js b/Gruntfile.js
index <HASH>..<HASH> 100644
--- a/Gruntfile.js
+++ b/Gruntfile.js
@@ -61,6 +61,17 @@ module.exports = function(grunt) {
'<%= jshint.all %>'
],
tasks: ['jshint', 'uglify', 'version']
+ },
+ livereload: {
+ // Browser live reloading
+ // https://github.com/gruntjs/grunt-contrib-watch#live-reloading
+ options: {
+ livereload: false
+ },
+ files: [
+ 'assets/css/main.min.css',
+ 'assets/js/scripts.min.js'
+ ]
}
},
clean: {
|
Added browser live reloading to Grunt
|
roots_sage
|
train
|
js
|
7b0e012f6220fd007069f7648d70e2aa35a3aaae
|
diff --git a/patroni/dcs/consul.py b/patroni/dcs/consul.py
index <HASH>..<HASH> 100644
--- a/patroni/dcs/consul.py
+++ b/patroni/dcs/consul.py
@@ -55,6 +55,8 @@ class HTTPClient(object):
kwargs['ca_certs'] = ca_cert
if verify or ca_cert:
kwargs['cert_reqs'] = ssl.CERT_REQUIRED
+ else:
+ kwargs['cert_reqs'] = ssl.CERT_NONE
self.http = urllib3.PoolManager(num_pools=10, **kwargs)
self._ttl = None
|
Disable SSL verification for Consul when it is required (#<I>)
Consul client uses urllib3 with a verify=True by default. When
SSL verification is disabled with verify=False, we can see
CERTIFICATE_VERIFY_FAILED exceptions. With urllib3 <I>-1 on
Debian Stretch, the "cert_reqs" argument must be explicitaly set
to ssl.CERT_NONE to effectively disable SSL verification.
|
zalando_patroni
|
train
|
py
|
47ff597b8d72e18c6433c261382fc7c950292f52
|
diff --git a/src/main/java/com/tlswe/awsmock/ec2/servlet/MockEc2EndpointServlet.java b/src/main/java/com/tlswe/awsmock/ec2/servlet/MockEc2EndpointServlet.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/tlswe/awsmock/ec2/servlet/MockEc2EndpointServlet.java
+++ b/src/main/java/com/tlswe/awsmock/ec2/servlet/MockEc2EndpointServlet.java
@@ -35,11 +35,15 @@ public class MockEc2EndpointServlet extends HttpServlet {
* throw an I/O exception in case of failing to get the httpServletResponse's writer
*/
@Override
- @SuppressWarnings("unchecked")
protected final void doGet(final HttpServletRequest request,
final HttpServletResponse response) throws
IOException {
+ @SuppressWarnings("unchecked")
+ /*-
+ * As request.getParameterMap() in servlet-api-2.5 provides return type of raw java.util.Map,
+ * we suppress the type safety check warning here.
+ */
Map<String, String[]> queryParams = (Map<String, String[]>) request
.getParameterMap();
|
Explain why we preserve unchecked warnings
|
treelogic-swe_aws-mock
|
train
|
java
|
e9b67c330923cb77fe59f2e2aec78f8254055f29
|
diff --git a/noxfile.py b/noxfile.py
index <HASH>..<HASH> 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -263,6 +263,10 @@ def _runtests(session, coverage, transport, cmd_args):
else:
session.run('python', os.path.join('tests', 'runtests.py'), *cmd_args)
except CommandFailed:
+ # Disabling re-running failed tests for the time being
+ raise
+
+ # pylint: disable=unreachable
names_file_path = os.path.join('artifacts', 'failed-tests.txt')
session.log('Re-running failed tests if possible')
session.install('xunitparser==1.3.3', silent=PIP_INSTALL_SILENT)
@@ -309,6 +313,7 @@ def _runtests(session, coverage, transport, cmd_args):
_run_with_coverage(session, 'coverage', 'run', '-m', 'tests.runtests', *cmd_args)
else:
session.run('python', os.path.join('tests', 'runtests.py'), *cmd_args)
+ # pylint: enable=unreachable
@nox.session(python=_PYTHON_VERSIONS, name='runtests-parametrized')
|
Disable re-runing failed tests for now
|
saltstack_salt
|
train
|
py
|
2d9e0d8ab3f535808c0639823631686ae9f5e13b
|
diff --git a/util/collate/collate.go b/util/collate/collate.go
index <HASH>..<HASH> 100644
--- a/util/collate/collate.go
+++ b/util/collate/collate.go
@@ -15,6 +15,7 @@
package collate
import (
+ "fmt"
"sync/atomic"
"github.com/pingcap/errors"
@@ -217,7 +218,7 @@ func SubstituteMissingCollationToDefault(co string) string {
if _, err = GetCollationByName(co); err == nil {
return co
}
- logutil.BgLogger().Warn(err.Error())
+ logutil.BgLogger().Warn(fmt.Sprintf("The collation %s specified on connection is not supported when new collation is enabled, switch to the default collation: %s", co, mysql.DefaultCollationName))
var coll *charset.Collation
if coll, err = GetCollationByName(charset.CollationUTF8MB4); err != nil {
logutil.BgLogger().Warn(err.Error())
|
session: improve the log for unsupport collation when connecting (#<I>)
close pingcap/tidb#<I>
|
pingcap_tidb
|
train
|
go
|
3d0df720068650c9db5850160d1b35928ecc739d
|
diff --git a/simulation.py b/simulation.py
index <HASH>..<HASH> 100644
--- a/simulation.py
+++ b/simulation.py
@@ -1170,16 +1170,19 @@ def run(models, years=None, data_out=None, out_interval=1):
print('Running year {}'.format(year))
logger.debug('running year {}'.format(year))
+ t1 = time.time()
for model_name in models:
print('Running model {!r}'.format(model_name))
with log_start_finish(
'run model {!r}'.format(model_name), logger, logging.INFO):
model = get_model(model_name)
- t1 = time.time()
+ t2 = time.time()
model()
- print("Time to execute model = %.3fs" % (time.time()-t1))
- logger.info("Time to execute model = %.3fs" % (time.time()-t1))
+ print("Time to execute model = %.3fs" % (time.time()-t2))
+ logger.info("Time to execute model = %.3fs" % (time.time()-t2))
+ print("Time to execute year = %.3fs" % (time.time()-t1))
+ logger.info("Time to execute year = %.3fs" % (time.time()-t1))
year_counter += 1
if data_out:
|
print the time that models take to run - everyone should be aware, right?
|
UDST_orca
|
train
|
py
|
d5c0dea10faf4324b4fc1f53718870a5f5aab836
|
diff --git a/fedmsg/tests/test_text.py b/fedmsg/tests/test_text.py
index <HASH>..<HASH> 100644
--- a/fedmsg/tests/test_text.py
+++ b/fedmsg/tests/test_text.py
@@ -1051,6 +1051,7 @@ class TestPkgdb2BrRunComplete(Base):
"msg": {
"agent": "limburgher",
"unbranchedPackages": [],
+ "branchedPackages": ["nethack"],
},
}
|
Add new field to pkgdb2branch for the tests.
|
fedora-infra_fedmsg
|
train
|
py
|
06396dd3cb38cc5b3a294734a39f82e606bba4ac
|
diff --git a/pfurl/pfurl.py b/pfurl/pfurl.py
index <HASH>..<HASH> 100644
--- a/pfurl/pfurl.py
+++ b/pfurl/pfurl.py
@@ -813,6 +813,7 @@ class Pfurl():
str_msg = 'Removed existing local path... '
self.qprint('Creating empty local path %s...' % str_localPathFull)
os.makedirs(str_localPathFull)
+ b_exists = True
str_msg += 'Created new local path'
else:
str_msg = 'local path already exists!'
|
Fix bug that reports deep dir tree creation as false.
|
FNNDSC_pfurl
|
train
|
py
|
ecfc4f9cf36094d9f61837b4f41645d913d52c4d
|
diff --git a/lib/parameters/instance_param.rb b/lib/parameters/instance_param.rb
index <HASH>..<HASH> 100644
--- a/lib/parameters/instance_param.rb
+++ b/lib/parameters/instance_param.rb
@@ -63,11 +63,13 @@ module Parameters
end
#
+ # Inspects the instance parameter.
+ #
# @return [String]
# Inspection of the instance params value.
#
def inspect
- value.inspect
+ "#<#{self.class}: #{value.inspect}>"
end
end
|
Updated InstanceParam#inspect to not confuse developers in irb.
|
postmodern_parameters
|
train
|
rb
|
26e9c23f4061e40a9fe7ef13d44c8102df3c2cfa
|
diff --git a/src/aria/core/environment/Customizations.js b/src/aria/core/environment/Customizations.js
index <HASH>..<HASH> 100644
--- a/src/aria/core/environment/Customizations.js
+++ b/src/aria/core/environment/Customizations.js
@@ -34,7 +34,6 @@ Aria.classDefinition({
}
},
$constructor : function () {
- this.$EnvironmentBase.constructor.call(this);
/**
* True if a customization has been configured (see setEnvironment() )
* @protected
@@ -62,6 +61,7 @@ Aria.classDefinition({
templates : {},
modules : {}
};
+ this.$EnvironmentBase.constructor.call(this);
},
$prototype : {
/**
|
Fixes #<I>: setEnvironment settings abandoned by loading Customization
|
ariatemplates_ariatemplates
|
train
|
js
|
31c68f58152c0a2d81948d63a9f7b13f33083a59
|
diff --git a/src/Message/FindCustomerRequest.php b/src/Message/FindCustomerRequest.php
index <HASH>..<HASH> 100644
--- a/src/Message/FindCustomerRequest.php
+++ b/src/Message/FindCustomerRequest.php
@@ -3,7 +3,7 @@
namespace Omnipay\Braintree\Message;
use Braintree\Exception\NotFound;
-use Omnipay\Common\Exception\NotFoundException;
+use Symfony\Component\HttpKernel\Exception\NotFoundHttpException;
/**
* Find Customer Request
@@ -22,14 +22,14 @@ class FindCustomerRequest extends AbstractRequest
* @param mixed $data
*
* @return \Omnipay\Braintree\Message\CustomerResponse|\Omnipay\Common\Message\ResponseInterface
- * @throws \Omnipay\Common\Exception\NotFoundException
+ * @throws \Symfony\Component\HttpKernel\Exception\NotFoundHttpException
*/
public function sendData($data)
{
try {
$response = $this->braintree->customer()->find($this->getCustomerId());
} catch (NotFound $exception) {
- throw new NotFoundException($exception->getMessage());
+ throw new NotFoundHttpException($exception->getMessage());
}
return $this->response = new CustomerResponse($this, $response);
|
Reverted back to Symphony's NotFoundHttpException until the release of Omnipay Common
|
thephpleague_omnipay-braintree
|
train
|
php
|
dcf50d35947c013ae36cbba9a5914bb56fd7d0cd
|
diff --git a/tests/datasets_test.py b/tests/datasets_test.py
index <HASH>..<HASH> 100644
--- a/tests/datasets_test.py
+++ b/tests/datasets_test.py
@@ -44,7 +44,9 @@ class DatasetsTest(unittest.TestCase):
pth = op.join("./tmp/", "biodur_sample")
self.assertTrue(op.exists(pth))
+ @attr('slow')
def test_get(self):
+ io3d.datasets.download("3Dircadb1.1")
io3d.datasets.get("3Dircadb1", "*1/P*")
if __name__ == "__main__":
|
attribute slow to ircad test
|
mjirik_io3d
|
train
|
py
|
1635454755fa8fdc0dedf032c543d3f4006aa568
|
diff --git a/lib/puppet/application/string_base.rb b/lib/puppet/application/string_base.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/application/string_base.rb
+++ b/lib/puppet/application/string_base.rb
@@ -119,7 +119,7 @@ class Puppet::Application::StringBase < Puppet::Application
end
def validate
unless @action
- raise "You must specify #{string.actions.join(", ")} as a verb; 'save' probably does not work right now"
+ raise "You must specify #{string.actions.join(", ")} as a verb"
end
end
end
|
(#<I>) Remove "save does not work" language from strings.
Now we are pushing into production we can eliminate this language, which was a
legacy from the prototype that is no longer relevant globally.
|
puppetlabs_puppet
|
train
|
rb
|
6ad0bb692a4757165d09b8e3f50df389c7c75370
|
diff --git a/src/Components/AlterOperation.php b/src/Components/AlterOperation.php
index <HASH>..<HASH> 100644
--- a/src/Components/AlterOperation.php
+++ b/src/Components/AlterOperation.php
@@ -293,13 +293,13 @@ class AlterOperation extends Component
$list->getNext();
$nextToken = $list->getNext();
- if ($token->value === 'CHARACTER SET'){
+ if ($token->value === 'CHARACTER SET') {
// Reverting the changes we made in the beginning
$list->idx = $currentTokenID;
- } else if ($token->value === 'SET' && $nextToken->value === '('){
+ } else if ($token->value === 'SET' && $nextToken !== null && $nextToken->value === '(') {
// To avoid adding the tokens between the SET() parentheses to the unknown tokens
$list->getNextOfTypeAndValue(Token::TYPE_OPERATOR, ')');
- } else if ($token->value === 'SET' && $nextToken->value === 'DEFAULT'){
+ } else if ($token->value === 'SET' && $nextToken !== null && $nextToken->value === 'DEFAULT') {
// to avoid adding the `DEFAULT` token to the unknown tokens.
++$list->idx;
} else {
|
Ref #<I> - Add type checks in case a null value occurs
Pull-request: #<I>
|
phpmyadmin_sql-parser
|
train
|
php
|
637d05331dafe4be5ec9f101f1caae942e80f25c
|
diff --git a/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/BulkProcessor.java b/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/BulkProcessor.java
index <HASH>..<HASH> 100644
--- a/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/BulkProcessor.java
+++ b/src/main/java/fr/pilato/elasticsearch/crawler/fs/client/BulkProcessor.java
@@ -222,6 +222,7 @@ public class BulkProcessor {
* @return a bulk processor
*/
public static BulkProcessor simpleBulkProcessor(ElasticsearchClient client, int bulkSize, TimeValue flushInterval, String pipeline) {
+ logger.debug("Creating a bulk processor with size [{}], flush [{}], pipeline [{}]", bulkSize, flushInterval, pipeline);
return builder(client, new Listener() {
@Override
public void beforeBulk(long executionId, BulkRequest request) {
|
Adding a debug line for #<I>.
|
dadoonet_fscrawler
|
train
|
java
|
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5000 entries from the dataset where the programming language difference is Java, providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff languages are Java, providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the programming language difference is Java, providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the differences in languages are marked as Java, providing a snapshot of that specific subset but limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.