hash (stringlengths 40–40) | diff (stringlengths 131–26.7k) | message (stringlengths 7–694) | project (stringlengths 5–67) | split (stringclasses, 1 value) | diff_languages (stringlengths 2–24)
---|---|---|---|---|---
6630ead8beb696863a17ae64cf498c6b5ea6e7d8
|
diff --git a/mt940/__about__.py b/mt940/__about__.py
index <HASH>..<HASH> 100644
--- a/mt940/__about__.py
+++ b/mt940/__about__.py
@@ -6,7 +6,7 @@ A library to parse MT940 files and returns smart Python collections for
statistics and manipulation.
'''.strip().split())
__email__ = '[email protected]'
-__version__ = '4.7'
+__version__ = '4.8.0'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015 Rick van Hattem (wolph)'
__url__ = 'https://github.com/WoLpH/mt940'
|
Incrementing version to <I>
|
WoLpH_mt940
|
train
|
py
|
5dbb2a3548e2bc297ea49e47c22be4041becd1b3
|
diff --git a/abl/vpath/base/localfs.py b/abl/vpath/base/localfs.py
index <HASH>..<HASH> 100755
--- a/abl/vpath/base/localfs.py
+++ b/abl/vpath/base/localfs.py
@@ -172,3 +172,6 @@ class LocalFileSystem(FileSystem):
if not self.supports_symlinks():
raise OperationIsNotSupportedOnPlatform
return os.symlink(self._path(target), self._path(link_name))
+
+ def dump(self, outf, no_binary=False):
+ pass
diff --git a/abl/vpath/base/memory.py b/abl/vpath/base/memory.py
index <HASH>..<HASH> 100644
--- a/abl/vpath/base/memory.py
+++ b/abl/vpath/base/memory.py
@@ -449,6 +449,7 @@ class MemoryFileSystem(FileSystem):
elif value.kind == NodeKind.LINK:
outf.write("LINK: %s%s -> %s\n" % (path, name, value.target))
else:
+ outf.write("DIR: %s%s\n" % (path, name))
traverse(value,
(path[:-1] if path.endswith("/") else path) + "/" + name + "/")
|
Improve memory backend dumping to show dirs
In addition give localfs a (nop) dump function.
|
AbletonAG_abl.vpath
|
train
|
py,py
|
5c7b23c7c06e5e399fa8c088bca5a40f9410737f
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,6 @@ setup(
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
- "Topic :: System :: Logging",
],
packages=[
'pypercube',
|
Remove topic 'logging' from setup.py
|
sbuss_pypercube
|
train
|
py
|
0be9f168fa42580d289ad6cc9be201e09e4668cb
|
diff --git a/spec/docstring_parser_spec.rb b/spec/docstring_parser_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/docstring_parser_spec.rb
+++ b/spec/docstring_parser_spec.rb
@@ -1,6 +1,10 @@
require File.dirname(__FILE__) + "/spec_helper"
describe YARD::DocstringParser do
+ after(:all) do
+ YARD::Registry.clear
+ end
+
def parse(content, object = nil, handler = nil)
@library ||= Tags::Library.instance
@parser = DocstringParser.new(@library)
|
Cleanup after docstring parser spec
|
lsegal_yard
|
train
|
rb
|
f9b91f111ee5729dd4dc3a03ad84ca20a5be91a0
|
diff --git a/d1_common_python/src/tests/test_utils.py b/d1_common_python/src/tests/test_utils.py
index <HASH>..<HASH> 100644
--- a/d1_common_python/src/tests/test_utils.py
+++ b/d1_common_python/src/tests/test_utils.py
@@ -77,6 +77,19 @@ class TestUtils(unittest.TestCase):
c2.algorithm = 'MD5'
self.assertTrue(d1_common.util.checksums_are_equal(c1, c2))
+ def test_055(self):
+ '''get_checksum_calculator_by_dataone_designator() returns a checksum calculator'''
+ calculator = d1_common.util.get_checksum_calculator_by_dataone_designator('SHA-1')
+ calculator.update('test')
+ self.assertTrue(calculator.hexdigest())
+
+ def test_056(self):
+ '''get_checksum_calculator_by_dataone_designator() raises on invalid algorithm'''
+ self.assertRaises(
+ Exception, d1_common.util.get_checksum_calculator_by_dataone_designator,
+ 'SHA-224-bogus'
+ )
+
#===============================================================================
|
Since the get_checksum_calculator_by_dataone_designator method moved from
d1_instance_generator.checksum to d1_common.util, the tests are being moved.
|
DataONEorg_d1_python
|
train
|
py
|
209aaf3ba8b1399ab934cc3f91d31bea2b1ed193
|
diff --git a/lib/kynetx_am_api/application.rb b/lib/kynetx_am_api/application.rb
index <HASH>..<HASH> 100644
--- a/lib/kynetx_am_api/application.rb
+++ b/lib/kynetx_am_api/application.rb
@@ -278,7 +278,7 @@ module KynetxAmApi
return true
else
puts "ERROR SAVING KRL: #{response.inspect}" if $DEBUG
- raise KRLParseError "Unable to parse the KRL", response["error"]
+ raise KRLParseError.new "Unable to parse the KRL", response["error"]
end
end
diff --git a/lib/kynetx_am_api/krl_parse_error.rb b/lib/kynetx_am_api/krl_parse_error.rb
index <HASH>..<HASH> 100644
--- a/lib/kynetx_am_api/krl_parse_error.rb
+++ b/lib/kynetx_am_api/krl_parse_error.rb
@@ -3,10 +3,7 @@ class KRLParseError < StandardError
attr_reader :parse_errors
def initialize(msg, errors)
- if errors.class == String
- msg = errors
- @parse_errors = errors
- elsif errors.class == Array
+ if errors.class == Array
@parse_errors = errors
else
@parse_errors = errors.to_s
|
working with new error handling on parse errors
|
kynetx_Kynetx-Application-Manager-API
|
train
|
rb,rb
|
61584cb12f34d4639b5e37dd07cc3fd672bc114b
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -58,7 +58,7 @@ copyright = '2016, Amir Rachum'
# The short X.Y version.
version = pydocstyle.__version__
# The full version, including alpha/beta/rc tags.
-release = '1.0.0'
+release = pydocstyle.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
|
Correctly set 'release' in sphinx config. (#<I>)
|
PyCQA_pydocstyle
|
train
|
py
|
6c589b5843963203874e330340f6b86253b41e28
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -34,6 +34,12 @@ if sys.argv[-1] == 'tag':
_run("git push --tags")
sys.exit()
+if sys.argv[-1] == 'up':
+ _run('cd Utils; git up; cd ..')
+ _run('cd GeneAnnotation; git up; cd ..')
+ _run('git up')
+ sys.exit()
+
if sys.argv[-1] == 'install':
print("""-----------------------------------
Installing TargQC version {}
|
Add "setup.py up" command to update"
|
vladsaveliev_TargQC
|
train
|
py
|
a20b415ecb73c7801337aec6a9d5140f680da8f3
|
diff --git a/pynes/tests/__init__.py b/pynes/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/pynes/tests/__init__.py
+++ b/pynes/tests/__init__.py
@@ -73,7 +73,7 @@ class HexTestCase(TestCase):
while (cursor < len(expected) or cursor < len(actual)):
for a in range(16):
if cursor < len(expected) and cursor < len(actual):
- if expected[cursor] != actual[cursor]:
+ if expected[cursor] != actual[cursor] and line not in lines:
lines.append(line)
cursor += 1
line += 1
@@ -86,7 +86,7 @@ class HexTestCase(TestCase):
cursor = (line * 16)+ a
if cursor < len(expected) and cursor < len(actual):
if expected[cursor] != actual[cursor]:
- exp += '%s%02x%s' % (FAIL, ord(expected[cursor]), ENDC)
+ exp += '%s%02x%s' % (OKGREEN, ord(expected[cursor]), ENDC)
act += '%s%02x%s' % (FAIL, ord(actual[cursor]), ENDC)
else:
exp += '%02x' % ord(expected[cursor])
|
fixed HexTestCase, which was showing some lines more than once
|
gutomaia_nesasm_py
|
train
|
py
|
b18a6feb7dcf69562d08168c297a2c88e803c983
|
diff --git a/lib/isono/agent.rb b/lib/isono/agent.rb
index <HASH>..<HASH> 100644
--- a/lib/isono/agent.rb
+++ b/lib/isono/agent.rb
@@ -10,6 +10,12 @@ module Isono
include EventObservable
include ManagerHost
+ def self.inherited(klass)
+ klass.class_eval {
+ include Logger
+ }
+ end
+
def self.instance
@instance
end
|
force mixin of the Logger module for classes inherited from Isono::Agent.
|
axsh_isono
|
train
|
rb
|
216f08f279f96c0717c2ebada4eaacd8a35fb882
|
diff --git a/lib/eval.js b/lib/eval.js
index <HASH>..<HASH> 100644
--- a/lib/eval.js
+++ b/lib/eval.js
@@ -44,7 +44,11 @@ const runWithTimeout = (fn, ms, msg) => {
module.exports = (puppeteer, options) => {
const DEBUGGING = process.env.CHROME_PAGE_EVAL_DEBUGGING != null
- let launchOptions = Object.assign({}, options.launchOptions)
+ let launchOptions = Object.assign({
+ defaultViewport: null
+ }, options.launchOptions, {
+ args: [`--window-size=1280,1024`, ...(options.launchOptions != null && Array.isArray(options.launchOptions.args) ? options.launchOptions.args : [])]
+ })
if (DEBUGGING) {
debugMe(
@@ -109,6 +113,7 @@ module.exports = (puppeteer, options) => {
if (viewport != null) {
debugMe('using custom viewport:', viewport)
+ await page.setViewport(viewport)
} else {
debugMe('using default puppeteer viewport')
}
|
don’t use a default viewport (which means that it will take all available space of window) and set a higher window size
|
bjrmatos_chrome-page-eval
|
train
|
js
|
016e6c04436842e4ea997953c415c94f643274f4
|
diff --git a/tests/tests.py b/tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -40,6 +40,11 @@ def test_engum_to_str_small():
assert str(EngNumber(-0.220000125)) == '-220m'
+def test_1000f():
+ assert str(EngNumber('1000f')) == '1p'
+ assert str(EngNumber('1p')) == '1p'
+
+
def test_engnum_significant():
assert str(EngNumber('220m', significant=0)) == '220m'
assert str(EngNumber('220m', significant=1)) == '200m'
|
Adding testing for '<I>f' in an attempt to repeat a user-reported bug
|
slightlynybbled_engineering_notation
|
train
|
py
|
84abab2aa812e6527f60c43f5e9f66297db92ead
|
diff --git a/forms/DropdownField.php b/forms/DropdownField.php
index <HASH>..<HASH> 100644
--- a/forms/DropdownField.php
+++ b/forms/DropdownField.php
@@ -84,7 +84,7 @@
class DropdownField extends FormField {
/**
- * @var boolean $source Associative or numeric array of all dropdown items,
+ * @var Array $source Associative or numeric array of all dropdown items,
* with array key as the submitted field value, and the array value as a
* natural language description shown in the interface element.
*/
|
Update forms/DropdownField.php
small fix in comments
|
silverstripe_silverstripe-framework
|
train
|
php
|
952801a9ce5c1db905bdede3141ff5bb7f515e4d
|
diff --git a/UI/src/app/dashboard/core/data-factories/chatops-data.js b/UI/src/app/dashboard/core/data-factories/chatops-data.js
index <HASH>..<HASH> 100644
--- a/UI/src/app/dashboard/core/data-factories/chatops-data.js
+++ b/UI/src/app/dashboard/core/data-factories/chatops-data.js
@@ -10,27 +10,20 @@
function chatOpsData($http) {
var testDetailRoute = 'test-data/chatops-hipchat.json';
- var testImgDetailRoute = 'test-data/chatops-hipchat-img.json';
-
-
-
return {
details: details
};
function details(serviceUrl) {
- console.log("ServiceURl:"+serviceUrl);
return $http.get(serviceUrl).then(function (response) {
- return response.data;
- },function(response){
- console.log("Error occured:"+JSON.stringify(response));
+ return response.data;
+ }, function (response) {
return response.data;
});
}
-
}
})();
\ No newline at end of file
|
removed unnecessary debug statements
|
Hygieia_Hygieia
|
train
|
js
|
f577fd6608de8f1c5dee655991b7e75de17cadf1
|
diff --git a/molgenis-data-validation/src/test/java/org/molgenis/data/validation/RepositoryValidationDecoratorTest.java b/molgenis-data-validation/src/test/java/org/molgenis/data/validation/RepositoryValidationDecoratorTest.java
index <HASH>..<HASH> 100644
--- a/molgenis-data-validation/src/test/java/org/molgenis/data/validation/RepositoryValidationDecoratorTest.java
+++ b/molgenis-data-validation/src/test/java/org/molgenis/data/validation/RepositoryValidationDecoratorTest.java
@@ -2968,8 +2968,8 @@ public class RepositoryValidationDecoratorTest
when(entity0.get(attrNillableMrefName)).thenReturn(emptyList());
when(entity0.get(attrUniqueStringName)).thenReturn("unique1");
when(entity0.get(attrUniqueXrefName)).thenReturn(refEntity0);
- when(entity0.get(attrReadonlyMrefName)).thenReturn(null);
- when(entity0.getEntities(attrReadonlyMrefName)).thenReturn(Collections.emptyList());
+ when(entity0.get(attrReadonlyMrefName)).thenReturn(emptyList());
+ when(entity0.getEntities(attrReadonlyMrefName)).thenReturn(emptyList());
when(decoratedRepo.findOneById("id0")).thenReturn(entity0);
|
Update expected behavior for get() on existing entity.
|
molgenis_molgenis
|
train
|
java
|
0efa37933ac07cb8f83af3baf5f619d03ce1bd73
|
diff --git a/manticore/core/workspace.py b/manticore/core/workspace.py
index <HASH>..<HASH> 100644
--- a/manticore/core/workspace.py
+++ b/manticore/core/workspace.py
@@ -534,7 +534,7 @@ class ManticoreOutput(object):
fd.write('{SolverException}')
with self._named_stream('stdout') as _out:
- with self._named_stream('stdout') as _err:
+ with self._named_stream('stderr') as _err:
with self._named_stream('stdin') as _in:
with self._named_stream('net') as _net:
for name, fd, data in state.platform.syscall_trace:
diff --git a/tests/test_workspace.py b/tests/test_workspace.py
index <HASH>..<HASH> 100644
--- a/tests/test_workspace.py
+++ b/tests/test_workspace.py
@@ -104,6 +104,7 @@ class StateTest(unittest.TestCase):
self.assertIn('trace', keys)
self.assertIn('syscalls', keys)
self.assertIn('stdout', keys)
+ self.assertIn('stderr', keys)
self.assertIn('stdin', keys)
self.assertIn('messages', keys)
self.assertIn('txt', keys)
|
Fix stderr file creation and test for it going forwards (#<I>)
* fix stderr file creation and test for it going forwards
* consolidate tests
* clean up imports
|
trailofbits_manticore
|
train
|
py,py
|
6ca811a503ae085565c9c9cae98e9cb46246fa87
|
diff --git a/spec/configuration_spec.rb b/spec/configuration_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/configuration_spec.rb
+++ b/spec/configuration_spec.rb
@@ -70,6 +70,21 @@ describe FastlaneCore do
@config = FastlaneCore::Configuration.create(@options, @values)
end
+ describe "#values" do
+ it "returns the user values" do
+ values = @config.values
+ expect(values[:output]).to eq('..')
+ expect(values[:cert_name]).to eq('asdf')
+ end
+
+ it "returns the default values" do
+ @config = FastlaneCore::Configuration.create(@options, {}) # no user inputs
+ values = @config.values
+ expect(values[:cert_name]).to eq('production_default')
+ expect(values[:output]).to eq('.')
+ end
+ end
+
describe "fetch" do
it "raises an error if a non symbol was given" do
expect {
|
Added tests for newly introduced configuration method
|
fastlane_fastlane
|
train
|
rb
|
5890cdccaea53a489f899101d539f37388efbdb0
|
diff --git a/src/VCR/Client.php b/src/VCR/Client.php
index <HASH>..<HASH> 100644
--- a/src/VCR/Client.php
+++ b/src/VCR/Client.php
@@ -17,7 +17,11 @@ class Client
public function send($request)
{
- $response = $this->client->send($request);
+ try {
+ $response = $this->client->send($request);
+ } catch (BadResponseException $e) {
+ $response = $e->getResponse();
+ }
return new Response($response->getStatusCode(), $response->getHeaders(), $response->getBody());
}
}
\ No newline at end of file
|
Ensures code execution continues when there is a bad response.
|
php-vcr_php-vcr
|
train
|
php
|
c5a98b78934b56f47f5f5d3e999ea2d1313617a9
|
diff --git a/backbone-faux-server.js b/backbone-faux-server.js
index <HASH>..<HASH> 100644
--- a/backbone-faux-server.js
+++ b/backbone-faux-server.js
@@ -253,7 +253,7 @@
transport = null;
// When emulating HTTP, 'create', 'update', 'delete' and 'patch' are all mapped to POST.
- if ((Backbone.emulateHTTP || options.emulateHTTP) && c.httpMethod !== "GET") {
+ if (options.emulateHTTP && c.httpMethod !== "GET") {
c.httpMethodOverride = c.httpMethod;
c.httpMethod = "POST";
}
|
Remove redundant emulateHTTP check from sync
|
biril_backbone-faux-server
|
train
|
js
|
9a48c34e5ff61be5b70fc98575fb496ed756fe87
|
diff --git a/openquake/commonlib/readinput.py b/openquake/commonlib/readinput.py
index <HASH>..<HASH> 100644
--- a/openquake/commonlib/readinput.py
+++ b/openquake/commonlib/readinput.py
@@ -1005,7 +1005,7 @@ def get_sitecol_assetcol(oqparam, haz_sitecol=None, cost_types=()):
and sitecol is not sitecol.complete):
t0 = time.time()
assetcol = assetcol.reduce_also(sitecol)
- logging.info('Reduced sitecol in %d seconds', time.time() - t0)
+ logging.info('Reduced sitecol in %.1f seconds', time.time() - t0)
return sitecol, assetcol, discarded
|
Improved monitoring [skip CI]
|
gem_oq-engine
|
train
|
py
|
9b8b9801154a78f727d025f4350c5191b38e489f
|
diff --git a/lib/queue.js b/lib/queue.js
index <HASH>..<HASH> 100644
--- a/lib/queue.js
+++ b/lib/queue.js
@@ -32,7 +32,7 @@ function Queue(process, opts) {
self.id = opts.id || false;
self.priority = opts.priority || null;
- self.cancelIfRunning = (opts.cancelIfRunning === undefined ? true : !!opts.cancelIfRunning);
+ self.cancelIfRunning = (opts.cancelIfRunning === undefined ? false : !!opts.cancelIfRunning);
self.autoResume = (opts.autoResume === undefined ? true : !!opts.autoResume);
self.failTaskOnProcessException = (opts.failTaskOnProcessException === undefined ? true : !!opts.failTaskOnProcessException);
self.filo = opts.filo || false;
diff --git a/test/basic.js b/test/basic.js
index <HASH>..<HASH> 100644
--- a/test/basic.js
+++ b/test/basic.js
@@ -360,7 +360,6 @@ describe('Basic Queue', function() {
done();
}, {
precondition: function (cb) {
- console.log('called precondtiion');
retries++;
cb(null, retries === 2)
},
|
Removes console log statement, and makes cancelIfRunning default to false
|
diamondio_better-queue
|
train
|
js,js
|
ddef926aa64a94e7be9d152e757277cb89aedd96
|
diff --git a/splinter/driver/zopetestbrowser.py b/splinter/driver/zopetestbrowser.py
index <HASH>..<HASH> 100644
--- a/splinter/driver/zopetestbrowser.py
+++ b/splinter/driver/zopetestbrowser.py
@@ -56,6 +56,9 @@ class ZopeTestBrowser(DriverAPI):
def fill_in(self, name, value):
self._browser.getControl(name=name).value = value
+ def choose(self, name):
+ control = self._browser.getControl(name=name)
+ control.value = control.options
class ZopeTestBrowserElement(ElementAPI):
@@ -85,6 +88,10 @@ class ZopeTestBrowserControlElement(ElementAPI):
@property
def value(self):
return self._control.value
+
+ @property
+ def checked(self):
+ return bool(self._control.value)
def click(self):
- return self._control.click()
\ No newline at end of file
+ return self._control.click()
|
implemented choose in the driver, and added the checked property to the element control
|
cobrateam_splinter
|
train
|
py
|
0df8db6819b0452528fbd3efce6b8f11e147fb3b
|
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -597,27 +597,27 @@ test('sync helper test', function(t) {
)();
t.throws(
instance.foo.bind(instance),
- 'got promise in sync mode',
+ { message: 'got promise in sync mode' },
'override throws if result is a promise'
);
t.throws(
instance.bar.bind(instance),
- 'got promise in sync mode',
+ { message: 'got promise in sync mode' },
'parallel throws if result is a promise'
);
t.throws(
instance.baz.bind(instance),
- 'got promise in sync mode',
+ { message: 'got promise in sync mode' },
'pipe throws if result is a promise'
);
t.throws(
instance.qux.bind(instance),
- 'got promise in sync mode',
+ { message: 'got promise in sync mode' },
'sequence throws if result is a promise'
);
t.throws(
instance.quz.bind(instance),
- 'got promise in sync mode',
+ { message: 'got promise in sync mode' },
'compose throws if result is a promise'
);
});
|
refactor: update to new ava throws syntax
AVA 3 requires throws assertions to have an expectations object instead
of string or regexp to match against.
|
untool_mixinable
|
train
|
js
|
f64dc45276282016ae4a5642555ea27ad6f6d1a6
|
diff --git a/src/java/org/apache/cassandra/cql3/statements/SelectStatement.java b/src/java/org/apache/cassandra/cql3/statements/SelectStatement.java
index <HASH>..<HASH> 100644
--- a/src/java/org/apache/cassandra/cql3/statements/SelectStatement.java
+++ b/src/java/org/apache/cassandra/cql3/statements/SelectStatement.java
@@ -1549,7 +1549,7 @@ public class SelectStatement implements CQLStatement, MeasurableForPreparedCache
return stmt.columnRestrictions[def.position()];
case REGULAR:
case STATIC:
- return stmt.metadataRestrictions.get(def);
+ return stmt.metadataRestrictions.get(def.name);
default:
throw new AssertionError();
}
|
fix merge-created bad map key lookup type
|
Stratio_stratio-cassandra
|
train
|
java
|
6c4422e79af987a4c3814af9cf31442eb5207003
|
diff --git a/utils/fslock/fslock_test.go b/utils/fslock/fslock_test.go
index <HASH>..<HASH> 100644
--- a/utils/fslock/fslock_test.go
+++ b/utils/fslock/fslock_test.go
@@ -53,11 +53,14 @@ func (s *fslockSuite) TestValidNamesLockDir(c *C) {
func (s *fslockSuite) TestInvalidNames(c *C) {
for _, name := range []string{
+ ".start",
+ "-start",
"NoCapitals",
"no+plus",
"no/slash",
"no\\backslash",
"no$dollar",
+ "no:colon",
} {
dir := c.MkDir()
_, err := fslock.NewLock(dir, name)
|
Added a few more invalid names.
|
juju_juju
|
train
|
go
|
13e3cce27946c3209e57b1bb1948289f8a9b8c16
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -101,7 +101,7 @@ Linter.prototype.lintFiles = function (files, opts, cb) {
// flatten nested arrays
var files = results.reduce(function (files, result) {
result.forEach(function (file) {
- files.push(path.join(opts.cwd, file))
+ files.push(path.resolve(opts.cwd, file))
})
return files
}, [])
|
switch to path.resolve to make absolute paths work
|
standard_standard-engine
|
train
|
js
|
62d645b11a9116467b8c0165042ca40a85f00847
|
diff --git a/src/Auth.php b/src/Auth.php
index <HASH>..<HASH> 100755
--- a/src/Auth.php
+++ b/src/Auth.php
@@ -13,7 +13,7 @@ class Auth
{
$num_args = func_num_args();
if ($num_args === 1) {
- self::$config['url'] = self::$url;
+ self::$config['url'] = self::URL;
self::$config['key'] = func_get_arg(0);
self::$config['url'] = Factory::build('account')->authenticate()->url;
} elseif ($num_args === 2) {
|
Fix fatal error when setting API key
No such property $url, updated to use constant
|
loduis_teamwork.com-project-management
|
train
|
php
|
193f4f42efd85511e5bf16488d58fafbda818998
|
diff --git a/lib/Segment/Client.php b/lib/Segment/Client.php
index <HASH>..<HASH> 100644
--- a/lib/Segment/Client.php
+++ b/lib/Segment/Client.php
@@ -138,13 +138,17 @@ class Segment_Client {
* @param time $timestamp - time in seconds (time())
*/
private function formatTime($ts) {
+ // time()
if ($ts == null) $ts = time();
if ("integer" == gettype($ts)) return date("c", $ts);
- if (-1 == ($pos = strrpos($ts, "."))) return date("c");
- $sec = substr($ts, 0, $pos);
- $usec = substr($ts, $pos);
+
+ // anything else return a new date.
+ if ("double" != gettype($ts)) return date("c");
+
+ // microtime(true)
+ list($sec, $usec) = explode(".", (string)$ts);
$fmt = sprintf("Y-m-d\TH:i:s%sP", $usec);
- return date($fmt, (int)$sec);
+ return date($fmt, (int)$sec);
}
/**
|
client: fix float timestamp handling
|
segmentio_analytics-php
|
train
|
php
|
17b9544acc5c14f75070c1a54af2c365a4617f7e
|
diff --git a/lib/qless/worker/base.rb b/lib/qless/worker/base.rb
index <HASH>..<HASH> 100644
--- a/lib/qless/worker/base.rb
+++ b/lib/qless/worker/base.rb
@@ -216,6 +216,10 @@ module Qless
@current_job = job
end
end
+
+ def reconnect_each_client
+ uniq_clients.each { |client| client.redis.client.reconnect }
+ end
end
end
end
diff --git a/lib/qless/worker/forking.rb b/lib/qless/worker/forking.rb
index <HASH>..<HASH> 100644
--- a/lib/qless/worker/forking.rb
+++ b/lib/qless/worker/forking.rb
@@ -194,8 +194,7 @@ module Qless
slot = @sandboxes.delete(pid)
cpid = fork do
- # Reconnect each client
- uniq_clients.each { |client| client.redis.client.reconnect }
+ reconnect_each_client
spawn.run
end
|
Use a method to show intent rather than a comment.
|
seomoz_qless
|
train
|
rb,rb
|
cf2d66fcfb3235691ca910a8f923fc25acd371c5
|
diff --git a/tests/InstallerPluginTest.php b/tests/InstallerPluginTest.php
index <HASH>..<HASH> 100644
--- a/tests/InstallerPluginTest.php
+++ b/tests/InstallerPluginTest.php
@@ -14,7 +14,7 @@ use Mediact\CodingStandard\PhpStorm\FilesystemInterface;
use Mediact\CodingStandard\PhpStorm\Patcher\ConfigPatcherInterface;
use org\bovigo\vfs\vfsStream;
use PHPUnit\Framework\TestCase;
-use Mediact\CodingStandard\PhpStorm\Plugin;
+use Mediact\CodingStandard\PhpStorm\InstallerPlugin;
/**
* @coversDefaultClass \Mediact\CodingStandard\PhpStorm\InstallerPlugin
@@ -30,7 +30,7 @@ class InstallerPluginTest extends TestCase
{
$this->assertInternalType(
'array',
- Plugin::getSubscribedEvents()
+ InstallerPlugin::getSubscribedEvents()
);
}
@@ -82,7 +82,7 @@ class InstallerPluginTest extends TestCase
->method('patch')
->with($this->isInstanceOf(EnvironmentInterface::class));
- $plugin = new Plugin($patcher);
+ $plugin = new InstallerPlugin($patcher);
$plugin->onNewCodeEvent($event);
}
}
|
Changed class in phpunit test.
|
mediact_coding-standard-phpstorm
|
train
|
php
|
6666e2c5cc08778100311cadf9fb121fe298473c
|
diff --git a/src/test/com/twitter/elephantbird/pig/load/TestJsonLoader.java b/src/test/com/twitter/elephantbird/pig/load/TestJsonLoader.java
index <HASH>..<HASH> 100644
--- a/src/test/com/twitter/elephantbird/pig/load/TestJsonLoader.java
+++ b/src/test/com/twitter/elephantbird/pig/load/TestJsonLoader.java
@@ -78,7 +78,17 @@ public class TestJsonLoader {
FileWriter writer = new FileWriter(tempFile);
// json structure as in Twitter Streaming
- writer.write("{\"entities\":{\"hashtags\":[{\"indices\":[0,0],\"text\":\"test1\"},{\"indices\":[0,0],\"text\":\"test2\"}],\"user_mentions\":[],\"urls\":[]}}");
+ writer.write(
+ "{" +
+ " \"entities\": {" +
+ " \"hashtags\": [" +
+ " {\"indices\": [0,0], \"text\": \"test1\"}," +
+ " {\"indices\": [0,0], \"text\": \"test2\"}" +
+ " ]," +
+ " \"user_mentions\": []," +
+ " \"urls\": []" +
+ " }" +
+ "}");
writer.close();
// extract hashtags from it
|
Changes for a more understandable JSON formatting
|
twitter_elephant-bird
|
train
|
java
|
e026cc9e9a42b1c6e5ba3e415e855bea2fedb54b
|
diff --git a/src/ProjectKernel.php b/src/ProjectKernel.php
index <HASH>..<HASH> 100644
--- a/src/ProjectKernel.php
+++ b/src/ProjectKernel.php
@@ -178,7 +178,9 @@ class ProjectKernel
));
}
- // load in this order: library packages, kernel packages, project
+ // reset dirs, then load in this order:
+ // library packages, kernel packages, project
+ $this->includer->setDirs(array());
$this->includer->addDirs($this->packages['library']);
$this->includer->addDirs($this->packages['kernel']);
$this->includer->addDir($this->project->getBasePath());
|
fix bug where second includer pass loads modify.php twice
|
auraphp_Aura.Project_Kernel
|
train
|
php
|
61dfb3f873c02d9b1e385f5015b1be6aac846e2f
|
diff --git a/lib/gzr/modules/plan.rb b/lib/gzr/modules/plan.rb
index <HASH>..<HASH> 100644
--- a/lib/gzr/modules/plan.rb
+++ b/lib/gzr/modules/plan.rb
@@ -125,7 +125,7 @@ module Gzr
end
def upsert_plan_for_obj(user_id, source_plan, existing_plans)
- matches = existing_plans.select { |p| p.name == source_plan[:name] }
+ matches = existing_plans.select { |p| p.name == source_plan[:name] && user_id == p.user_id }
if matches.length > 0 then
say_ok "Modifying existing plan #{matches.first.id} #{matches.first.name}"
plan = keys_to_keep('update_scheduled_plan').collect do |e|
|
fixed bug where importing a scheduled plan might overwrite a plan of another user with the same name on the same object.
|
looker-open-source_gzr
|
train
|
rb
|
6439ed13587c60da9518ddae67818361290a6295
|
diff --git a/src/Sag.php b/src/Sag.php
index <HASH>..<HASH> 100644
--- a/src/Sag.php
+++ b/src/Sag.php
@@ -560,11 +560,24 @@ class Sag
$response->body .= $line;
}
+ /*
+ * $json won't be set if invalid JSON is sent back to us. This will most
+ * likely happen if we're GET'ing an attachment that isn't JSON (ex., a
+ * picture or plain text). Don't be fooled by storing a PHP string in an
+ * attachment as text/plain and then expecting it to be parsed by
+ * json_decode().
+ */
$json = json_decode($response->body);
- if(!empty($json->error))
- throw new SagCouchException("{$json->error} ({$json->reason})", $response->headers->_HTTP->status);
- $response->body = ($this->decodeResp) ? $json : $response->body;
+ if(isset($json))
+ {
+ // Check for an error from CouchDB regardless of whether they want JSON
+ // returned.
+ if(!empty($json->error))
+ throw new SagCouchException("{$json->error} ({$json->reason})", $response->headers->_HTTP->status);
+
+ $response->body = ($this->decodeResp) ? $json : $response->body;
+ }
return $response;
}
|
We now support non-JSON data being returned to us, thereby fixing stand alone attachment GET'ing.
|
sbisbee_sag
|
train
|
php
|
9823e13704a0449e1d65c4ca78aa237add934aaa
|
diff --git a/client/src/main/java/com/connectifier/xeroclient/XeroClient.java b/client/src/main/java/com/connectifier/xeroclient/XeroClient.java
index <HASH>..<HASH> 100644
--- a/client/src/main/java/com/connectifier/xeroclient/XeroClient.java
+++ b/client/src/main/java/com/connectifier/xeroclient/XeroClient.java
@@ -93,13 +93,14 @@ public class XeroClient {
}
private <T> T unmarshallResponse(Response response, Class<T> clazz) {
+ String responseBody = response.getBody();
try {
JAXBContext context = JAXBContext.newInstance(clazz);
Unmarshaller unmarshaller = context.createUnmarshaller();
- Source source = new StreamSource(new ByteArrayInputStream(response.getBody().getBytes()));
+ Source source = new StreamSource(new ByteArrayInputStream(responseBody.getBytes()));
return unmarshaller.unmarshal(source, clazz).getValue();
} catch (JAXBException e) {
- throw new IllegalStateException(e);
+ throw new IllegalStateException("Error unmarshalling response: " + responseBody, e);
}
}
|
Improve the error message when we have difficulty unmarshalling the response
|
benmccann_xero-java-client
|
train
|
java
|
50d90e6788d1251f698b2b0b6bcda3af3ae70d71
|
diff --git a/runtime/spec/language/regexp/interpolation_spec.rb b/runtime/spec/language/regexp/interpolation_spec.rb
index <HASH>..<HASH> 100644
--- a/runtime/spec/language/regexp/interpolation_spec.rb
+++ b/runtime/spec/language/regexp/interpolation_spec.rb
@@ -6,4 +6,12 @@ describe "Regexps with interpolation" do
str = "foo|bar"
/#{str}/.should == /foo|bar/
end
+
+ it "allows interpolation to interact with other Regexp constructs" do
+ str = "foo)|(bar"
+ /(#{str})/.should == /(foo)|(bar)/
+
+ str = "a"
+ /[#{str}-z]/.should == /[a-z]/
+ end
end
|
Add some more Regexp interpolation specs
|
opal_opal
|
train
|
rb
|
e9b2f542f266c40651d484d8f4461c5d42744e91
|
diff --git a/src/basis/ui/popup.js b/src/basis/ui/popup.js
index <HASH>..<HASH> 100644
--- a/src/basis/ui/popup.js
+++ b/src/basis/ui/popup.js
@@ -254,10 +254,16 @@
{
var offsetParent = getOffsetParent(this.element);
var box = resolveRelBox(this.relElement, offsetParent);
- var viewport = getViewportRect(offsetParent);
var width = this.element.offsetWidth;
var height = this.element.offsetHeight;
+ // NOTE: temporary solution addresses to app where document or body
+ // could be scrolled; for now it works, because popups lay into
+ // popupManager layer and documentElement or body could be a offset parent;
+ // but it would be broken when we allow popups to place in any layer in future;
+ // don't forget to implement univesal solution in this case
+ var viewport = getViewportRect(global, offsetParent);
+
dir = normalizeDir(dir, this.dir).split(' ');
var pointX = dir[0] == CENTER ? box.left + (box.width >> 1) : box[dir[0].toLowerCase()];
|
basis.ui.popup: fix viewport resolving for scrolling pages
|
basisjs_basisjs
|
train
|
js
|
3872fa587d559b03cdc76b3f60fac4cb673b7cf2
|
diff --git a/lib/Accessory.js b/lib/Accessory.js
index <HASH>..<HASH> 100644
--- a/lib/Accessory.js
+++ b/lib/Accessory.js
@@ -174,8 +174,11 @@ Accessory.prototype.addBridgedAccessory = function(accessory) {
throw new Error("Cannot add a bridged Accessory with the same UUID as another bridged Accessory: " + existing.UUID);
}
- // Setup Bridging State Service
- accessory.addService(Service.BridgingState);
+ if(accessory.getService(Service.BridgingState) == undefined) {
+ // Setup Bridging State Service
+ accessory.addService(Service.BridgingState);
+ }
+
accessory
.getService(Service.BridgingState)
.getCharacteristic(Characteristic.LinkAddress)
|
Only add bridge service if the provided accessory doesn't have one.
|
KhaosT_HAP-NodeJS
|
train
|
js
|
150760ed421a14fb702d3d5ad4c3c3fc8dd28811
|
diff --git a/src/localizers/moment.js b/src/localizers/moment.js
index <HASH>..<HASH> 100644
--- a/src/localizers/moment.js
+++ b/src/localizers/moment.js
@@ -6,7 +6,7 @@ let dateRangeFormat = ({ start, end }, culture, local)=>
local.format(start, 'L', culture) + ' — ' + local.format(end, 'L', culture)
let timeRangeFormat = ({ start, end }, culture, local) =>
- local.format(start, 'LT', culture) + ' — ' + local.format(end, 'h:mm' : 'LT', culture)
+ local.format(start, 'LT', culture) + ' — ' + local.format(end, 'LT', culture)
let weekRangeFormat = ({ start, end }, culture, local)=>
local.format(start, 'MMM DD', culture) +
|
Fix time range format
Fixes #<I> I think
|
intljusticemission_react-big-calendar
|
train
|
js
|
9c2ffe9759430d6da8b2b2f6ea8141e4c56ed5c3
|
diff --git a/topologies/replset.js b/topologies/replset.js
index <HASH>..<HASH> 100644
--- a/topologies/replset.js
+++ b/topologies/replset.js
@@ -1022,7 +1022,6 @@ ReplSet.prototype.destroy = function(options) {
// Clear out all monitoring
for (var i = 0; i < this.intervalIds.length; i++) {
this.intervalIds[i].stop();
- this.intervalIds[i].stop();
}
// Reset list of intervalIds
|
chore(topology): removing double timeout clear
This was accidentally introduced during the following refactor:
cd<I>d<I>f<I>ad<I>c6a<I>ffcfa<I>d1e7d3a2
|
mongodb_node-mongodb-native
|
train
|
js
|
24b6c4df7487a7fbe7f0cfa40033d8e6d4416213
|
diff --git a/lib/mongodb/aggregation_cursor.js b/lib/mongodb/aggregation_cursor.js
index <HASH>..<HASH> 100644
--- a/lib/mongodb/aggregation_cursor.js
+++ b/lib/mongodb/aggregation_cursor.js
@@ -203,7 +203,9 @@ var AggregationCursor = function(collection, serverCapabilities) {
}
// Inherit from Readable
-inherits(AggregationCursor, Readable);
+if(Readable != null) {
+ inherits(AggregationCursor, Readable);
+}
// Exports the Aggregation Framework
exports.AggregationCursor = AggregationCursor;
\ No newline at end of file
|
Only inherit from streamable if available
|
mongodb_node-mongodb-native
|
train
|
js
|
e52d2e51642f44f681faceb7bfd535764ab91f9d
|
diff --git a/ui/src/sources/components/SourceForm.js b/ui/src/sources/components/SourceForm.js
index <HASH>..<HASH> 100644
--- a/ui/src/sources/components/SourceForm.js
+++ b/ui/src/sources/components/SourceForm.js
@@ -96,7 +96,7 @@ const SourceForm = ({
/>
</div>
: null}
- <div className={`form-group col-xs-12 ${isUsingAuth ? 'col-sm-6' : ''}`}>
+ <div className="form-group col-xs-12">
<label htmlFor="telegraf">Telegraf Database</label>
<input
type="text"
|
Make Telegraf database input in source form full width
Don’t need to make space for the role dropdown any longer
|
influxdata_influxdb
|
train
|
js
|
05fc6de502faf57310931014a658261c7e50aed8
|
diff --git a/src/org/jgroups/protocols/TUNNEL.java b/src/org/jgroups/protocols/TUNNEL.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/protocols/TUNNEL.java
+++ b/src/org/jgroups/protocols/TUNNEL.java
@@ -1,4 +1,3 @@
-
package org.jgroups.protocols;
import org.jgroups.Address;
@@ -17,6 +16,7 @@ import java.io.DataInputStream;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
+import java.net.SocketException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -226,6 +226,8 @@ public class TUNNEL extends TP {
}
break;
}
+ }catch (SocketException ioe) {
+ break;
}catch (Exception ioe) {
if(stub.isConnected())
continue mainloop;
|
Fixing scenario with dead socket looping forever: JGRP-<I>
Proposed change for <URL>
|
belaban_JGroups
|
train
|
java
|
c7607f45518f1e35de41fbea5ce3428218449948
|
diff --git a/lib/hamlit/railtie.rb b/lib/hamlit/railtie.rb
index <HASH>..<HASH> 100644
--- a/lib/hamlit/railtie.rb
+++ b/lib/hamlit/railtie.rb
@@ -4,6 +4,11 @@ require 'rails'
module Hamlit
class Railtie < ::Rails::Railtie
initializer :hamlit, before: :load_config_initializers do |app|
+ # Load haml/plugin first to override if available
+ begin
+ require 'haml/plugin'
+ rescue LoadError
+ end
require 'hamlit/rails_template'
end
end
|
Force overriding Haml::Plugin
Close #<I>
|
haml_haml
|
train
|
rb
|
41e2103959fa4b6a64653b6d329ea3294d4fe9a2
|
diff --git a/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/JSFSubsystemTransformers.java b/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/JSFSubsystemTransformers.java
index <HASH>..<HASH> 100644
--- a/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/JSFSubsystemTransformers.java
+++ b/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/JSFSubsystemTransformers.java
@@ -117,7 +117,7 @@ class JSFSubsystemTransformers {
final String name = operation.require(NAME).asString();
final ModelNode value = operation.get(ModelDescriptionConstants.VALUE);
if (SLOT_ATTRIBUTE_NAME.equals(name)) {
- if (value.isDefined() && value.equals(SLOT_DEFAULT_VALUE)) {
+ if (value.isDefined() && SLOT_DEFAULT_VALUE.equals(value.asString())) {
return DISCARD.transformOperation(context, address, operation);
} else {
return new TransformedOperation(operation,
|
[WFLY-<I>] fix ModelNode / String comparison
fix the comparison between value ModelNode and SLOT_DEFAULT_VALUE string
|
wildfly_wildfly
|
train
|
java
|
7e3e46b5bb4355dee3d0bb2740c302c83644bfb0
|
diff --git a/tests/test_settings.py b/tests/test_settings.py
index <HASH>..<HASH> 100644
--- a/tests/test_settings.py
+++ b/tests/test_settings.py
@@ -19,14 +19,18 @@ import tempfile
import sys
from os.path import basename, dirname, splitext
-from PyQt5.QtCore import Qt, QCoreApplication, QSettings
+from PyQt5.QtCore import Qt, QSettings
from PyQt5.QtGui import QColor, QFont
+from PyQt5.QtWidgets import QApplication
from ReText import readListFromSettings, writeListToSettings, \
readFromSettings, writeToSettings
from ReText.highlighter import colorScheme, updateColorScheme
-# Keep a reference so it is not garbage collected
-app = QCoreApplication.instance() or QCoreApplication(sys.argv)
+# For this particular test, QCoreApplication is enough. However, we should
+# only have one QCoreApplication instance for all tests in a process. As
+# other tests need QApplication, we should not create a bare QCoreApplication
+# here. Also, keep a reference so it is not garbage collected.
+app = QApplication.instance() or QApplication(sys.argv)
class TestSettings(unittest.TestCase):
def setUp(self):
|
tests: Create (full) QApplication in test_settings too
Explain why that is needed in a comment
|
retext-project_retext
|
train
|
py
|
ad659a4e4cd3820497eb1a94896ad75ee3929573
|
diff --git a/lib/handlers/bin.js b/lib/handlers/bin.js
index <HASH>..<HASH> 100644
--- a/lib/handlers/bin.js
+++ b/lib/handlers/bin.js
@@ -1311,10 +1311,10 @@ module.exports = Observable.extend({
}
// Include 'Edit in JS Bin' button
- if (options.edit) {
+ if (options.edit && !req.ajax) {
var data = {static: helpers.urlForStatic('', req.secure), root: helpers.url('/', true, req.secure) };
insert.push('<script src="' + helpers.urlForStatic('js/render/edit.js?' + helpers.set('version'), req.secure) + '"></script>');
- insert.push('<script>jsbinShowEdit(' + JSON.stringify(data) + ');</script>');
+ insert.push('<script>jsbinShowEdit && jsbinShowEdit(' + JSON.stringify(data) + ');</script>');
}
// Trigger an event to allow listeners to apply scripts to the page.
|
Don't send HTML with edit links on XHR
|
jsbin_jsbin
|
train
|
js
|
a2936ecbd97ae9806a452111d0392f19d70b7e6c
|
diff --git a/src/Exception/BadMethodCallException.php b/src/Exception/BadMethodCallException.php
index <HASH>..<HASH> 100644
--- a/src/Exception/BadMethodCallException.php
+++ b/src/Exception/BadMethodCallException.php
@@ -1,21 +1,5 @@
<?php
-/**
- * Genial Framework.
- *
- * @author Nicholas English <https://github.com/Nenglish7>
- * @author Genial Contributors <https://github.com/orgs/Genial-Framework/people>
- *
- * @link <https://github.com/Genial-Framework/Env> for the canonical source repository.
- * @copyright Copyright (c) 2017-2018 Genial Framework. <https://github.com/Genial-Framework>
- * @license <https://github.com/Genial-Framework/Env/blob/master/LICENSE> New BSD License.
- */
-
namespace Genial\Env\Exception;
-
-/**
- * BadMethodCallException.
- */
class BadMethodCallException extends \BadMethodCallException implements ExceptionInterface
{
-
}
|
Update BadMethodCallException.php
|
genial-framework_Environment
|
train
|
php
|
29e875aa3e3a547c8ec0786f7d13ffdc0ece764b
|
diff --git a/oct2py/matread.py b/oct2py/matread.py
index <HASH>..<HASH> 100644
--- a/oct2py/matread.py
+++ b/oct2py/matread.py
@@ -77,7 +77,7 @@ class MatRead(object):
data = loadmat(self.out_file)
except UnicodeDecodeError as e:
raise Oct2PyError(str(e))
- for key in data.keys():
+ for key in list(data.keys()):
if key.startswith('_') and not key == '_':
del data[key]
else:
|
Avoid dictionary changing size in for loop in py3k
|
blink1073_oct2py
|
train
|
py
|
c5a3c5a343078882a00cd2aca26e8d969eab9752
|
diff --git a/test/mocks/MockLogger.js b/test/mocks/MockLogger.js
index <HASH>..<HASH> 100644
--- a/test/mocks/MockLogger.js
+++ b/test/mocks/MockLogger.js
@@ -4,9 +4,9 @@
* @author: [email protected]
* @created: 7/8/14 5:16 PM
*/
-const dash = require('lodash' ),
- Logger = require('../../lib/Logger' ),
- MockAppender = require('./MockAppender');
+const dash = require('lodash');
+const Logger = require('../../lib/Logger');
+const MockAppender = require('./MockAppender');
const MockLogger = function(options) {
'use strict';
@@ -21,13 +21,13 @@ const MockLogger = function(options) {
opts.pid = 'test12345';
}
if (!opts.appenders) {
- opts.appenders = [ appender ];
+ opts.appenders = [appender];
}
if (!opts.level) {
opts.level = 'trace';
}
- dash.extend( this, new Logger(opts) );
+ dash.extend(this, new Logger(opts));
this.getLogEntries = function() {
return appender.entries;
@@ -46,7 +46,7 @@ MockLogger.createLogger = function(category, level) {
opts.level = level;
}
- return new MockLogger( opts );
+ return new MockLogger(opts);
};
module.exports = MockLogger;
|
refactor for eslint rules
file: MockLogger.js
refactor var declarations and spaces
|
darrylwest_simple-node-logger
|
train
|
js
|
fb5d8c51ae24d44b42b69b427d0ed0037ef5f750
|
diff --git a/cf/commands/environmentvariablegroup/set_running_environment_variable_group.go b/cf/commands/environmentvariablegroup/set_running_environment_variable_group.go
index <HASH>..<HASH> 100644
--- a/cf/commands/environmentvariablegroup/set_running_environment_variable_group.go
+++ b/cf/commands/environmentvariablegroup/set_running_environment_variable_group.go
@@ -29,7 +29,7 @@ func (cmd SetRunningEnvironmentVariableGroup) Metadata() command_metadata.Comman
Name: "set-running-environment-variable-group",
Description: T("Pass parameters as JSON to create a running environment variable group"),
ShortName: "srevg",
- Usage: T("CF_NAME set-running-environment-variable-group"),
+ Usage: T(`CF_NAME set-running-environment-variable-group '{"name":"value","name":"value"}'`),
}
}
|
Modify srevg command usage to show example.
[Finishes #<I>]
|
cloudfoundry_cli
|
train
|
go
|
777b54cf4dfa228a320d6854c573c973fa62b452
|
diff --git a/src/Engines/Node.php b/src/Engines/Node.php
index <HASH>..<HASH> 100644
--- a/src/Engines/Node.php
+++ b/src/Engines/Node.php
@@ -45,6 +45,6 @@ class Node implements Engine
protected function createTempFilePath(): string
{
- return $this->tempPath.'/'.md5(time()).'.js';
+ return implode(DIRECTORY_SEPARATOR, [$this->tempPath, md5(time()).'.js']);
}
}
|
Use DIRECTORY_SEPARATOR constant
|
spatie_server-side-rendering
|
train
|
php
|
f24d909dae53b122b14b7d98f88f7e4d0558ed17
|
diff --git a/test/e2e/metrics_util.go b/test/e2e/metrics_util.go
index <HASH>..<HASH> 100644
--- a/test/e2e/metrics_util.go
+++ b/test/e2e/metrics_util.go
@@ -39,7 +39,7 @@ import (
const (
podStartupThreshold time.Duration = 5 * time.Second
- listPodLatencySmallThreshold time.Duration = 500 * time.Millisecond
+ listPodLatencySmallThreshold time.Duration = 1 * time.Second
listPodLatencyMediumThreshold time.Duration = 1 * time.Second
listPodLatencyLargeThreshold time.Duration = 1 * time.Second
apiCallLatencySmallThreshold time.Duration = 250 * time.Millisecond
|
Increase limit for listing pods in <I>-node cluster
|
kubernetes_kubernetes
|
train
|
go
|
8d70f476d0f1b8b039321a84fca9f168ca5c0622
|
diff --git a/test/models/workflow_test.rb b/test/models/workflow_test.rb
index <HASH>..<HASH> 100644
--- a/test/models/workflow_test.rb
+++ b/test/models/workflow_test.rb
@@ -261,6 +261,7 @@ class WorkflowTest < ActiveSupport::TestCase
user.request_review(edition,{comment: "Review this guide please."})
assert edition.can_approve_review?
other_user.approve_review(edition, {comment: "Looks good to me"})
+ assert edition.can_request_amendments?
assert edition.can_publish?
end
|
Assert ready to amends_needed transition is possible
|
alphagov_govuk_content_models
|
train
|
rb
|
26b796ef737e830c311d687ed42c8783d2de9273
|
diff --git a/lib/etl/control/control.rb b/lib/etl/control/control.rb
index <HASH>..<HASH> 100644
--- a/lib/etl/control/control.rb
+++ b/lib/etl/control/control.rb
@@ -283,7 +283,7 @@ module ETL #:nodoc:
end
def parse_text(text)
- control = ETL::Control::Control.new(nil)
+ control = ETL::Control::Control.new('no-file')
eval(text, Context.create(control), 'inline')
control.validate
control
|
Put a fake filename for parse_text. This helper is only used by tests, and some scenarios require a filename to be set.
|
activewarehouse_activewarehouse-etl
|
train
|
rb
|
6914fe436c608f0624e0cafdc9740437761f38a3
|
diff --git a/cmd/object-multipart-common.go b/cmd/object-multipart-common.go
index <HASH>..<HASH> 100644
--- a/cmd/object-multipart-common.go
+++ b/cmd/object-multipart-common.go
@@ -67,16 +67,6 @@ func (u *uploadsV1) RemoveUploadID(uploadID string) {
}
}
-// Index - returns the index of matching the upload id.
-func (u uploadsV1) Index(uploadID string) int {
- for i, u := range u.Uploads {
- if u.UploadID == uploadID {
- return i
- }
- }
- return -1
-}
-
// readUploadsJSON - get all the saved uploads JSON.
func readUploadsJSON(bucket, object string, disk StorageAPI) (uploadIDs uploadsV1, err error) {
uploadJSONPath := path.Join(mpartMetaPrefix, bucket, object, uploadsJSONFile)
|
Remove unused function. (#<I>)
|
minio_minio
|
train
|
go
|
dfadf3fa857e0099ecac6d51b47be43ab263a9d4
|
diff --git a/taxtastic/refpkg.py b/taxtastic/refpkg.py
index <HASH>..<HASH> 100644
--- a/taxtastic/refpkg.py
+++ b/taxtastic/refpkg.py
@@ -53,21 +53,20 @@ def md5file(fobj):
def scratch_file(unlink=True, **kwargs):
"""Create a temporary file and return its name.
- Additional arguments are passed to ``tempfile.mkstemp``
+ Additional arguments are passed to :class:`tempfile.NamedTemporaryFile`
At the start of the with block a secure, temporary file is created
and its name returned. At the end of the with block it is
deleted.
"""
+ kwargs['delete'] = False
+ tf = tempfile.NamedTemporaryFile(**kwargs)
+ tf.close()
try:
- tmp_fd, tmp_name = tempfile.mkstemp(text=True, **kwargs)
- os.close(tmp_fd)
- yield tmp_name
- except ValueError:
- raise
- else:
+ yield tf.name
+ finally:
if unlink:
- os.unlink(tmp_name)
+ os.unlink(tf.name)
def manifest_template():
|
Bugfix: temporary files were not cleaned up.
|
fhcrc_taxtastic
|
train
|
py
|
9940d021b488ae1a5112aa9d24adb25b85b02cd2
|
diff --git a/lib/rest-ftp-daemon/settings.rb b/lib/rest-ftp-daemon/settings.rb
index <HASH>..<HASH> 100644
--- a/lib/rest-ftp-daemon/settings.rb
+++ b/lib/rest-ftp-daemon/settings.rb
@@ -12,7 +12,7 @@ class Settings < Settingslogic
# Direct access to any depth
def at *path
- path.reduce(Settings) {|m,key| m && m[key.to_s] }
+ path.reduce(Settings) { |m, key| m && m[key.to_s] }
end
# Dump whole settings set to readable YAML
|
rubocop: space missing after comma, and space between { and | missing
|
bmedici_rest-ftp-daemon
|
train
|
rb
|
52c44d9146287361c3b159d9bbb84f30e81e124c
|
diff --git a/src/android/Sync.java b/src/android/Sync.java
index <HASH>..<HASH> 100644
--- a/src/android/Sync.java
+++ b/src/android/Sync.java
@@ -217,7 +217,7 @@ public class Sync extends CordovaPlugin {
OpenForReadResult readResult = null;
File outputDir = cordova.getActivity().getCacheDir();
- file = File.createTempFile(id, ".tmp", outputDir);
+ file = File.createTempFile(("cdv_" + id), ".tmp", outputDir);
final Uri targetUri = resourceApi.remapUri(Uri.fromFile(file));
|
Issue #<I>: Error in downloading a zip from URL on Android
|
phonegap_phonegap-plugin-contentsync
|
train
|
java
|
98040128385c454a267056eda895369bf24ccabf
|
diff --git a/lib/discordrb/commands/rate_limiter.rb b/lib/discordrb/commands/rate_limiter.rb
index <HASH>..<HASH> 100644
--- a/lib/discordrb/commands/rate_limiter.rb
+++ b/lib/discordrb/commands/rate_limiter.rb
@@ -108,7 +108,7 @@ module Discordrb::Commands
# @return [Integer, false] How much time to wait or false if the request succeeded.
def rate_limited?(key, thing)
# Check whether the bucket actually exists
- return false unless @buckets[key]
+ return false unless @buckets && @buckets[key]
@buckets[key].rate_limited?(thing)
end
|
Check for the existence of the buckets hash before rate limiting
|
meew0_discordrb
|
train
|
rb
|
8fe300325c793994cfd7e6a508b6185df05b0684
|
diff --git a/src/Search/Bitap/convertMaskToIndices.php b/src/Search/Bitap/convertMaskToIndices.php
index <HASH>..<HASH> 100644
--- a/src/Search/Bitap/convertMaskToIndices.php
+++ b/src/Search/Bitap/convertMaskToIndices.php
@@ -27,7 +27,7 @@ function convertMaskToIndices(array $matchmask = [], ?int $minMatchCharLength =
}
// (i-1 - start) + 1 => i - start
- if ($matchmask[$i - 1] ?? false && $i - $start >= $minMatchCharLength) {
+ if (($matchmask[$i - 1] ?? false) && $i - $start >= $minMatchCharLength) {
$indices[] = [$start, $i - 1];
}
|
fix: wrap ?? operation in parentheses so it is evaluated before the following && operator
|
Loilo_Fuse
|
train
|
php
|
5d82546087537b1ffd5800a22434f31204c53ebd
|
diff --git a/lib/jasmine-stealth.js b/lib/jasmine-stealth.js
index <HASH>..<HASH> 100644
--- a/lib/jasmine-stealth.js
+++ b/lib/jasmine-stealth.js
@@ -1,9 +1,5 @@
//https://github.com/searls/jasmine-stealth
(function(jasmine) {
- var argsToArray = function(args) {
- return Array.prototype.slice.call(args, 0);
- };
-
beforeEach(function() {
this.stubFor = this.spyOn;
});
@@ -11,7 +7,7 @@
jasmine.createStub = jasmine.createSpy;
jasmine.Spy.prototype.when = function() {
var spy = this,
- ifThis = argsToArray(arguments);
+ ifThis = jasmine.util.argsToArray(arguments);
spy._stealth_stubbings = spy._stealth_stubbings || [];
var priorStubbing = spy.plan();
@@ -19,7 +15,7 @@
spy.andCallFake(function() {
for(var i=0;i<spy._stealth_stubbings.length;i++) {
var stubbing = spy._stealth_stubbings[i];
- if(jasmine.getEnv().equals_(stubbing.ifThis,argsToArray(arguments))) {
+ if(jasmine.getEnv().equals_(stubbing.ifThis,jasmine.util.argsToArray(arguments))) {
return stubbing.thenThat;
}
}
|
looky there - jasmine already had an argsToArray. #GMTA
|
searls_jasmine-stealth
|
train
|
js
|
edb1ebcd8cd99ee71086f7534973a528d06601bc
|
diff --git a/rootpy/stl.py b/rootpy/stl.py
index <HASH>..<HASH> 100644
--- a/rootpy/stl.py
+++ b/rootpy/stl.py
@@ -19,15 +19,15 @@ class TemplateNode(object):
self.name = name
self.children = []
- def compile(self):
+ def compile(self, verbose=False):
"""
Recursively compile chilren
"""
if not self.children:
return
for child in self.children:
- child.compile()
- generate(str(self), self.headers, verbose=True)
+ child.compile(verbose=verbose)
+ generate(str(self), self.headers, verbose=verbose)
@property
def headers(self):
diff --git a/rootpy/tests/test_stl.py b/rootpy/tests/test_stl.py
index <HASH>..<HASH> 100644
--- a/rootpy/tests/test_stl.py
+++ b/rootpy/tests/test_stl.py
@@ -30,7 +30,6 @@ def test_parse():
print template
assert_raises(SyntaxError, parse_template, template)
-
if __name__ == "__main__":
import nose
nose.runmodule()
|
stl compilation verbosity and tests
|
rootpy_rootpy
|
train
|
py,py
|
35021f955b9cc93aa091c086f29f06fae17b2807
|
diff --git a/listing-bundle/contao/modules/ModuleListing.php b/listing-bundle/contao/modules/ModuleListing.php
index <HASH>..<HASH> 100644
--- a/listing-bundle/contao/modules/ModuleListing.php
+++ b/listing-bundle/contao/modules/ModuleListing.php
@@ -205,7 +205,7 @@ class ModuleListing extends \Module
/**
* Prepare the URL
*/
- $strUrl = preg_replace('/\?.*$/', '', $this->Environment->request);
+ $strUrl = preg_replace('/\?.*$/', '', \Environment::get('request'));
$blnQuery = false;
foreach (preg_split('/&(amp;)?/', $_SERVER['QUERY_STRING']) as $fragment)
|
[Listing] Replaced `$this->Environment->…` with `Environment::get(…)`
|
contao_contao
|
train
|
php
|
613566ac71422280e4ff619d3b687ad364cbe5bd
|
diff --git a/Classes/Search/SearchComponentManager.php b/Classes/Search/SearchComponentManager.php
index <HASH>..<HASH> 100644
--- a/Classes/Search/SearchComponentManager.php
+++ b/Classes/Search/SearchComponentManager.php
@@ -99,4 +99,18 @@ class SearchComponentManager
return $searchComponent;
}
+
+ /**
+ * Unregisters a search component
+ *
+ * @param string $componentName Search component name
+ */
+ public function removeSearchComponent($componentName)
+ {
+ if (!array_key_exists($componentName, self::$searchComponents)) {
+ return;
+ }
+
+ unset(self::$searchComponents[$componentName]);
+ }
}
|
[FEATURE] Allow unregistration of search components (#<I>)
|
TYPO3-Solr_ext-solr
|
train
|
php
|
009d8c6dbf677ba7f28016ead7d3161dd6d4e1b3
|
diff --git a/src/Client.php b/src/Client.php
index <HASH>..<HASH> 100644
--- a/src/Client.php
+++ b/src/Client.php
@@ -19,6 +19,7 @@ class Client {
const MESSAGE_TYPE_UNSUSCRIBE = 6;
const ERROR_REQUEST_CLIENT_NO_EXIST = 'ErrorRequestClientNoExist';
+ const ERROR_REQUEST_CLIENT_NOT_CONNECTED = 'ErrorRequestClientNotConnected';
/**
|
Client: Added ErrorRequestClientNotConnected
|
dronemill_eventsocket-client-php
|
train
|
php
|
72406bdc430294d5e065f3c8836bdca7fc42cd0b
|
diff --git a/interfaces/associations/support/fixtures/hasMany.child.fixture.js b/interfaces/associations/support/fixtures/hasMany.child.fixture.js
index <HASH>..<HASH> 100644
--- a/interfaces/associations/support/fixtures/hasMany.child.fixture.js
+++ b/interfaces/associations/support/fixtures/hasMany.child.fixture.js
@@ -27,6 +27,7 @@ module.exports = Waterline.Collection.extend({
amount: {
type: 'number',
+ columnName: 'amt',
autoMigrations: {
columnType: 'integer'
}
@@ -39,6 +40,14 @@ module.exports = Waterline.Collection.extend({
}
},
+ note: {
+ type: 'string',
+ columnName: 'memo',
+ autoMigrations: {
+ columnType: 'varchar'
+ }
+ },
+
apartment: {
model: 'apartment',
columnName: 'apartment_id',
diff --git a/interfaces/associations/support/fixtures/hasMany.parent.fixture.js b/interfaces/associations/support/fixtures/hasMany.parent.fixture.js
index <HASH>..<HASH> 100644
--- a/interfaces/associations/support/fixtures/hasMany.parent.fixture.js
+++ b/interfaces/associations/support/fixtures/hasMany.parent.fixture.js
@@ -26,6 +26,7 @@ module.exports = Waterline.Collection.extend({
},
name: {
+ columnName: 'n',
type: 'string',
autoMigrations: {
columnType: 'varchar'
|
Use custom column names for some attributes in has-many tests
(shouldn't affect the tests, which is the point)
|
balderdashy_waterline-adapter-tests
|
train
|
js,js
|
667253a5c31062c58a374423243bbb4654b6f913
|
diff --git a/peewee_mssql.py b/peewee_mssql.py
index <HASH>..<HASH> 100644
--- a/peewee_mssql.py
+++ b/peewee_mssql.py
@@ -11,8 +11,6 @@ try:
except ImportError:
PooledDatabase = None
-__version__ = '0.1.1'
-
class MssqlQueryCompiler(QueryCompiler):
# TODO: implement limit and offset properly, we can use:
# SELECT *
|
Update peewee_mssql.py
|
COUR4G3_peewee-mssql
|
train
|
py
|
6bf6c628b6563f62e3514a9e4bfb5390322120f3
|
diff --git a/ast.go b/ast.go
index <HASH>..<HASH> 100644
--- a/ast.go
+++ b/ast.go
@@ -936,13 +936,13 @@ func (s *SelectStatement) ValidateAggregates(tr targetRequirement) error {
groupByDuration, _ := s.GroupByInterval()
// If we have a group by interval, but no aggregate function, it's an invalid statement
- if s.IsRawQuery && groupByDuration > 0 {
+ if s.IsRawQuery && !s.Distinct && groupByDuration > 0 {
return fmt.Errorf("GROUP BY requires at least one aggregate function")
}
// If we have an aggregate function with a group by time without a where clause, it's an invalid statement
if tr == targetNotRequired { // ignore create continuous query statements
- if !s.IsRawQuery && groupByDuration > 0 && !s.hasTimeDimensions(s.Condition) {
+ if (!s.IsRawQuery || s.Distinct) && groupByDuration > 0 && !s.hasTimeDimensions(s.Condition) {
return fmt.Errorf("aggregate functions with GROUP BY time require a WHERE time clause")
}
}
|
make distinct care about the same thing aggregates do for validation
|
influxdata_influxql
|
train
|
go
|
03f95b78eeb62d840fcb2628dc5e4d49431584ee
|
diff --git a/src/cell.js b/src/cell.js
index <HASH>..<HASH> 100644
--- a/src/cell.js
+++ b/src/cell.js
@@ -139,10 +139,14 @@ Cell.prototype._topLeftChar = function(offset){
else {
leftChar = offset == 0 ? 'mid-mid' : 'bottom-mid';
if(this.cells){ //TODO: cells should always exist - some tests don't fill it in though
- var cellAbove = this.cells[this.y-1][x];
- if(cellAbove instanceof Cell.NoOpCell){
+ var spanAbove = this.cells[this.y-1][x] instanceof Cell.NoOpCell;
+ var spanLeft = offset == 0 && this.cells[this.y][x-1] instanceof Cell.RowSpanCell;
+ if(spanAbove){
leftChar = offset == 0 ? 'top-mid' : 'mid';
}
+ if(spanLeft){
+ leftChar = 'left-mid';
+ }
}
}
}
diff --git a/test/table-test.js b/test/table-test.js
index <HASH>..<HASH> 100644
--- a/test/table-test.js
+++ b/test/table-test.js
@@ -59,7 +59,7 @@ describe('Table', function () {
var expected = [
'┌───────────┬───────────┬───────┐'
, '│ greetings │ greetings │ hello │'
- , '│ │ ┼───────┤'
+ , '│ │ ├───────┤'
, '│ │ │ howdy │'
, '└───────────┴───────────┴───────┘'
];
|
fix cell render: rowSpan to the left of non-rowSpan cells.
|
cli-table_cli-table3
|
train
|
js,js
|
fca356158b396b766fc7c8de2bd6f5c1ed523c0b
|
diff --git a/lib/active_shipping/shipping/carriers/fedex.rb b/lib/active_shipping/shipping/carriers/fedex.rb
index <HASH>..<HASH> 100644
--- a/lib/active_shipping/shipping/carriers/fedex.rb
+++ b/lib/active_shipping/shipping/carriers/fedex.rb
@@ -380,11 +380,10 @@ module ActiveMerchant
location = Location.new(:city => city, :state => state, :postal_code => zip_code, :country => country)
description = event.get_text('EventDescription').to_s
-
- # for now, just assume UTC, even though it probably isn't
- time = Time.parse("#{event.get_text('Timestamp').to_s}")
- zoneless_time = Time.utc(time.year, time.month, time.mday, time.hour, time.min, time.sec)
-
+
+ time = Time.parse("#{event.get_text('Timestamp').to_s}")
+ zoneless_time = time.utc
+
shipment_events << ShipmentEvent.new(description, zoneless_time, location)
end
shipment_events = shipment_events.sort_by(&:time)
|
Removed FedEx timezone stripping.
|
Shopify_active_shipping
|
train
|
rb
|
a51676e0eb602aa46a8a1f982b804cb07b2531e2
|
diff --git a/salt/states/dockerio.py b/salt/states/dockerio.py
index <HASH>..<HASH> 100644
--- a/salt/states/dockerio.py
+++ b/salt/states/dockerio.py
@@ -788,7 +788,7 @@ def run(name,
if not onlyif:
return valid(comment='onlyif execution failed')
elif isinstance(onlyif, string_types):
- if not retcode(cid, onlyif):
+ if not cmd.retcode(onlyif) == 0:
return valid(comment='onlyif execution failed')
if unless is not None:
@@ -796,7 +796,7 @@ def run(name,
if unless:
return valid(comment='unless execution succeeded')
elif isinstance(unless, string_types):
- if retcode(cid, unless):
+ if cmd.retcode(unless) == 0:
return valid(comment='unless execution succeeded')
if docked_onlyif is not None:
|
Fixes #<I> olyif and unless should run on the host
|
saltstack_salt
|
train
|
py
|
4090f13a2cafaaa7279e251cab452f2bccd0e3f8
|
diff --git a/.travis.yml b/.travis.yml
index <HASH>..<HASH> 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,10 +1,11 @@
sudo: false
language: go
go:
- - 1.4.x
- - 1.5.x
- - 1.6.x
- - 1.7.x
+ - 1.4
+ - 1.5
+ - 1.6
+ - 1.7
+ - 1.8
- master
script:
diff --git a/ini.go b/ini.go
index <HASH>..<HASH> 100644
--- a/ini.go
+++ b/ini.go
@@ -37,7 +37,7 @@ const (
// Maximum allowed depth when recursively substituing variable names.
_DEPTH_VALUES = 99
- _VERSION = "1.27.2"
+ _VERSION = "1.27.3"
)
// Version returns current package version literal.
@@ -60,7 +60,7 @@ var (
// Explicitly write DEFAULT section header
DefaultHeader = false
-
+
// Indicate whether to put a line between sections
PrettySection = true
)
diff --git a/struct.go b/struct.go
index <HASH>..<HASH> 100644
--- a/struct.go
+++ b/struct.go
@@ -161,7 +161,7 @@ func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim stri
}
intVal, err := key.Int64()
- if err != nil || intVal == 0 {
+ if err != nil {
return nil
}
field.SetInt(intVal)
|
struct: remove skipping 0 value for int types (#<I>)
|
go-ini_ini
|
train
|
yml,go,go
|
e7bc6374cf9516462fb7023bba8c06f75ddc3901
|
diff --git a/grimoire_elk/enriched/enrich.py b/grimoire_elk/enriched/enrich.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/enriched/enrich.py
+++ b/grimoire_elk/enriched/enrich.py
@@ -846,13 +846,13 @@ class Enrich(ElasticItems):
return sh_ids
- def enrich_onion(self, enrich_backend, in_index, out_index, data_source, contribs_field,
- timeframe_field, sort_on_field, no_incremental=False):
+ def enrich_onion(self, enrich_backend, in_index, out_index, data_source,
+ contribs_field, timeframe_field, sort_on_field, no_incremental=False):
logger.info("[Onion] Starting study")
# Creating connections
- es = Elasticsearch([self.elastic.url], timeout=100, verify_certs=self.elastic.requests.verify)
+ es = Elasticsearch([enrich_backend.elastic.url], timeout=100, verify_certs=self.elastic.requests.verify)
in_conn = ESOnionConnector(es_conn=es, es_index=in_index,
contribs_field=contribs_field,
timeframe_field=timeframe_field,
|
[enrich] Replace elasticsearch url to onion study
This code replaces the way of passing the elasticsearch url to
the onion study. Now, the url is taken from the enrich backend
instance.
|
chaoss_grimoirelab-elk
|
train
|
py
|
a85d865d9660a9b26ae5681675cfb9e6806d5bcf
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/cache/local/twoq/O2QCache.java b/core/src/main/java/com/orientechnologies/orient/core/storage/cache/local/twoq/O2QCache.java
index <HASH>..<HASH> 100755
--- a/core/src/main/java/com/orientechnologies/orient/core/storage/cache/local/twoq/O2QCache.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/storage/cache/local/twoq/O2QCache.java
@@ -739,8 +739,7 @@ public class O2QCache implements OReadCache {
}
}
} catch (OLoadCacheStateException lcse) {
- OLogManager.instance()
- .warn(this, "Cannot restore state of cache for storage placed under " + writeCache.getRootDirectory(), lcse);
+ OLogManager.instance().warn(this, "Cannot restore state of cache for storage placed under " + writeCache.getRootDirectory());
} catch (Exception e) {
throw OException.wrapException(
new OStorageException("Cannot restore state of cache for storage placed under " + writeCache.getRootDirectory()), e);
|
Issue #<I> was fixed.
|
orientechnologies_orientdb
|
train
|
java
|
c56f5288a0f7f12a568c3d0ec5101d9b26a1ec6d
|
diff --git a/lib/ASN1/Universal/Integer.php b/lib/ASN1/Universal/Integer.php
index <HASH>..<HASH> 100644
--- a/lib/ASN1/Universal/Integer.php
+++ b/lib/ASN1/Universal/Integer.php
@@ -85,7 +85,8 @@ class Integer extends Object implements Parsable
public static function fromBinary(&$binaryData, &$offsetIndex = 0)
{
- self::parseIdentifier($binaryData[$offsetIndex], static::getType(), $offsetIndex++);
+ $parsedObject = new static(0);
+ self::parseIdentifier($binaryData[$offsetIndex], $parsedObject->getType(), $offsetIndex++);
$contentLength = self::parseContentLength($binaryData, $offsetIndex, 1);
$isNegative = (ord($binaryData[$offsetIndex]) & 0x80) != 0x00;
|
Non static method shouldn't be called statically
|
fgrosse_PHPASN1
|
train
|
php
|
83d57259457af02d76917662e10ed656647cd86e
|
diff --git a/.github/get_pypi_info.py b/.github/get_pypi_info.py
index <HASH>..<HASH> 100644
--- a/.github/get_pypi_info.py
+++ b/.github/get_pypi_info.py
@@ -29,6 +29,8 @@ def get_info(package_name: str = "") -> dict:
version = resp["info"]["version"]
deps_and_version = {}
for package in resp["info"]["requires_dist"]:
+ if "; extra ==" in package:
+ continue
package_name = package.split("(")[0].replace(" ", "")
package_name = package_name.replace("[pipeline]", "")
if package_name == "tables":
|
Prevent extra to be added in run deps
|
openfisca_openfisca-core
|
train
|
py
|
c140a936155103e3b69744c6db3bb7705a95ded7
|
diff --git a/bundles/org.eclipse.orion.client.ui/web/plugins/filePlugin/fileImpl.js b/bundles/org.eclipse.orion.client.ui/web/plugins/filePlugin/fileImpl.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.ui/web/plugins/filePlugin/fileImpl.js
+++ b/bundles/org.eclipse.orion.client.ui/web/plugins/filePlugin/fileImpl.js
@@ -500,7 +500,10 @@ define(["orion/Deferred", "orion/xhr", "orion/URL-shim", "orion/operation", "ori
var timeout = options && options.timeout ? options.timeout : 15000,
opts = {
timeout: timeout,
- headers: { "Orion-Version": "1" },
+ headers: {
+ "Orion-Version": "1",
+ "Accept": "application/json, *.*"
+ },
log: false
};
if(options && typeof options.readIfExists === 'boolean') {
|
Defect <I> - Orion Web IDE client displays a big huge ugly red bar across the entire sceen on a <I> server error
|
eclipse_orion.client
|
train
|
js
|
ba3108112b41033a1a2b97189061e641859cd8c4
|
diff --git a/tests/test_examples.py b/tests/test_examples.py
index <HASH>..<HASH> 100644
--- a/tests/test_examples.py
+++ b/tests/test_examples.py
@@ -555,7 +555,8 @@ class TestJsConsole(unittest.TestCase):
output = pipe.getvalue()
pipe.close()
- self.assertIn("ReferenceError: console is not defined", output)
+ self.assertNotIn("[log] Log message", output)
+ self.assertNotIn("[err] Error message", output)
if __name__ == '__main__':
unittest.main()
|
Change test_no_js_console to work in later node versions
In the latest node version, `console.log` is defined in `runInThisContext`, so this test would fail
|
common-workflow-language_cwltool
|
train
|
py
|
ec87a5102170e63009dd511d436d23b69a596286
|
diff --git a/ayrton/__init__.py b/ayrton/__init__.py
index <HASH>..<HASH> 100755
--- a/ayrton/__init__.py
+++ b/ayrton/__init__.py
@@ -150,8 +150,8 @@ def polute (d):
'_k', '_p', '_r', '_s', '_u', '_w', '_x', '_L',
'_N', '_S', '_nt', '_ot' ],
'ayrton.expansion': [ 'bash', ],
- 'ayrton.functions': [ 'export', 'run', 'ssh', 'unset', ],
- 'ayrton': [ 'Capture', 'cd', ],
+ 'ayrton.functions': [ 'cd', 'export', 'run', 'ssh', 'unset', ],
+ 'ayrton': [ 'Capture', ],
'sh': [ 'CommandNotFound', ],
}
|
* moved cd() to ayrton.functions.
|
StyXman_ayrton
|
train
|
py
|
dd8659ec952d2bd1c81a3374168f9d55f404f57d
|
diff --git a/lib/web/middleware/logger.js b/lib/web/middleware/logger.js
index <HASH>..<HASH> 100644
--- a/lib/web/middleware/logger.js
+++ b/lib/web/middleware/logger.js
@@ -32,6 +32,11 @@ module.exports = function() {
// proxy end to output loggging
var end = res.end;
+
+ res.on('error', function(err) {
+ log.error('Failed to send a response: ' + err.toString());
+ });
+
res.end = function(chunk, encoding) {
// Make sure the end function actually executes
res.end = end;
|
Add 'error' handler on the request object.
|
racker_dreadnot
|
train
|
js
|
ab05f4e561f1fe1d829e71d62e8c3b38d38e2531
|
diff --git a/pysparkling/sql/expressions/expressions.py b/pysparkling/sql/expressions/expressions.py
index <HASH>..<HASH> 100644
--- a/pysparkling/sql/expressions/expressions.py
+++ b/pysparkling/sql/expressions/expressions.py
@@ -248,3 +248,18 @@ class NullSafeBinaryOperation(BinaryOperation):
def unsafe_operation(self, value1, value2):
raise NotImplementedError
+
+class NullSafeColumnOperation(Expression):
+ def __init__(self, column, *args):
+ super(NullSafeColumnOperation, self).__init__(column, *args)
+ self.column = column
+
+ def eval(self, row, schema):
+ value = self.column.eval(row, schema)
+ return self.unsafe_operation(value)
+
+ def __str__(self):
+ raise NotImplementedError
+
+ def unsafe_operation(self, value):
+ raise NotImplementedError
|
Add a NullSafeColumnOperation class
|
svenkreiss_pysparkling
|
train
|
py
|
a5ce0e94d00711493054a4242a7d5d1a5e3eee9e
|
diff --git a/ui-codemirror.js b/ui-codemirror.js
index <HASH>..<HASH> 100644
--- a/ui-codemirror.js
+++ b/ui-codemirror.js
@@ -91,9 +91,9 @@ angular.module('ui.codemirror', [])
// Override the ngModelController $render method, which is what gets called when the model is updated.
// This takes care of the synchronizing the codeMirror element with the underlying model, in the case that it is changed by something else.
ngModel.$render = function () {
- //Code mirror expects a string so make sure it gets one
- //Although the formatter have already done this, it can be possible that another formatter returns undefined (for example the required directive)
- var safeViewValue = ngModel.$viewValue || '';
+ //Code mirror expects a string so make sure it gets one
+ //Although the formatter have already done this, it can be possible that another formatter returns undefined (for example the required directive)
+ var safeViewValue = ngModel.$viewValue || '';
codeMirror.setValue(safeViewValue);
};
|
[FIX] jshint mixed spaces and tabs.
|
angular-ui_ui-codemirror
|
train
|
js
|
20327c2b5f73092b42c0925b74f4723afd9d315b
|
diff --git a/gpustat/core.py b/gpustat/core.py
index <HASH>..<HASH> 100644
--- a/gpustat/core.py
+++ b/gpustat/core.py
@@ -330,12 +330,10 @@ class GPUStat(object):
return fp
def jsonify(self):
- o = dict(self.entry)
+ o = self.entry.copy()
if self.entry['processes'] is not None:
o['processes'] = [{k: v for (k, v) in p.items() if k != 'gpu_uuid'}
for p in self.entry['processes']]
- else:
- o['processes'] = '({})'.format(NOT_SUPPORTED)
return o
|
json: When process information is unavailable, expose as null
The `processes` field of GPUStat as json is changed to be `null`
instead of a string (NOT_SUPPORTED) when process information is not
available.
Refs: #<I>, #<I>
|
wookayin_gpustat
|
train
|
py
|
15949625c38c13c96789674ffc7688f83e35a1fe
|
diff --git a/watcher.go b/watcher.go
index <HASH>..<HASH> 100644
--- a/watcher.go
+++ b/watcher.go
@@ -190,6 +190,9 @@ func (view *DataView) poll(ch chan *DataView, client *api.Client) {
continue
}
+ // Update the index in case we got a new version, but the data is the same
+ view.LastIndex = qm.LastIndex
+
// Do not trigger a render if the data is the same
// TODO: Does this behave correctly without explicit type conversion?
if reflect.DeepEqual(data, view.Data) {
@@ -197,7 +200,6 @@ func (view *DataView) poll(ch chan *DataView, client *api.Client) {
}
// If we got this far, there is new data!
- view.LastIndex = qm.LastIndex
view.Data = data
ch <- view
}
|
Update the index in case we got a new version, but the data is the same
|
hashicorp_consul-template
|
train
|
go
|
e1f8dedfc08435904a66edd1a8e43b6e0b2e0b4e
|
diff --git a/eventsourcing/examples/wiki/test.py b/eventsourcing/examples/wiki/test.py
index <HASH>..<HASH> 100644
--- a/eventsourcing/examples/wiki/test.py
+++ b/eventsourcing/examples/wiki/test.py
@@ -1,3 +1,4 @@
+from typing import cast
from unittest import TestCase
from uuid import uuid4
@@ -113,12 +114,7 @@ This is a wiki about us!
self.assertEqual(page["modified_by"], user_id)
# Check a snapshot was created by now.
- self.assertTrue(
- len(
- list(
- app.snapshots.get(
- app.repository.get(Index.create_id("welcome-visitors")).ref
- )
- )
- )
- )
+ assert app.snapshots
+ index = cast(Index, app.repository.get(Index.create_id("welcome-visitors")))
+ assert index.ref
+ self.assertTrue(len(list(app.snapshots.get(index.ref))))
|
Fixed mypy issues in test for wiki example.
|
johnbywater_eventsourcing
|
train
|
py
|
4615c14f449d77b6228f8c554928637ba98e3262
|
diff --git a/src/directives/sf-field.directive.js b/src/directives/sf-field.directive.js
index <HASH>..<HASH> 100644
--- a/src/directives/sf-field.directive.js
+++ b/src/directives/sf-field.directive.js
@@ -319,7 +319,7 @@ sfPath, sfSelect) {
// Get the object parent object
let obj = scope.model;
if (key.length > 1) {
- obj = sfSelect(key.splice(0, key.length - 1), obj)
+ obj = sfSelect(key.slice(0, key.length - 1), obj)
}
// parent can be undefined if the form hasn't been filled out
@@ -336,7 +336,7 @@ sfPath, sfSelect) {
return;
}
- delete obj[key[0]];
+ delete obj[key[key.length-1]];
}
}
}
|
don't splice the key, messes with other things
|
json-schema-form_angular-schema-form
|
train
|
js
|
9ddfdf7eb31e6c873a2d533a2f71d6a251c1f237
|
diff --git a/venv_update.py b/venv_update.py
index <HASH>..<HASH> 100644
--- a/venv_update.py
+++ b/venv_update.py
@@ -411,7 +411,7 @@ def do_install(reqs):
reqnames(previously_installed) -
reqnames(required_with_deps) -
reqnames(recently_installed) -
- set(['pip', 'setuptools'])
+ set(['pip', 'setuptools', 'wheel']) # the stage1 bootstrap packages
)
# 2) Uninstall any extraneous packages.
|
wheel isn't extraneous
|
Yelp_venv-update
|
train
|
py
|
75fbc8bb4dfa8717eb3abeb5dcd7d9620d3d0940
|
diff --git a/src/Codeception/Module/DrupalBootstrap.php b/src/Codeception/Module/DrupalBootstrap.php
index <HASH>..<HASH> 100644
--- a/src/Codeception/Module/DrupalBootstrap.php
+++ b/src/Codeception/Module/DrupalBootstrap.php
@@ -94,6 +94,13 @@ class DrupalBootstrap extends Module {
}
/**
+ * @param $uid
+ */
+ public function addUsers ($uid) {
+ $this->users[] = $uid;
+ }
+
+ /**
* @return string
*/
public function getNginxUrl() {
|
Adds method to add users to deletion.
|
guncha25_drupal-8-bootstrap
|
train
|
php
|
8925ddb8d54fa1f1fa84b88732f4e21404415e5e
|
diff --git a/app/models/agent.rb b/app/models/agent.rb
index <HASH>..<HASH> 100644
--- a/app/models/agent.rb
+++ b/app/models/agent.rb
@@ -383,7 +383,14 @@ class Agent < ActiveRecord::Base
agents_to_events = {}
Agent.connection.select_rows(sql).each do |receiver_agent_id, source_agent_type, receiver_agent_type, event_id|
- next unless const_defined?(source_agent_type) && const_defined?(receiver_agent_type)
+
+ begin
+ Object.const_get(source_agent_type)
+ Object.const_get(receiver_agent_type)
+ rescue NameError
+ next
+ end
+
agents_to_events[receiver_agent_id.to_i] ||= []
agents_to_events[receiver_agent_id.to_i] << event_id
end
|
Handle lazy loading of Agents in gems during Agent.receive! (#<I>)
|
huginn_huginn
|
train
|
rb
|
488e3154bef2d852aab5a6887f78b0b5e13e244e
|
diff --git a/shared/chat/conversation/attachment-input/index.js b/shared/chat/conversation/attachment-input/index.js
index <HASH>..<HASH> 100644
--- a/shared/chat/conversation/attachment-input/index.js
+++ b/shared/chat/conversation/attachment-input/index.js
@@ -40,7 +40,8 @@ class RenderAttachmentInput extends Component<void, Props, State> {
render() {
const count = this.props.inputs.length
- const currentTitle = this.props.inputs[this.state.index].title
+ const currentTitle =
+ (this.props.inputs[this.state.index] && this.props.inputs[this.state.index].title) || ''
return (
<PopupDialog onClose={this.props.onClose}>
|
Check inputs for title on attachment input (#<I>)
|
keybase_client
|
train
|
js
|
7e8f865c6e903bca66482f661f29c882332909aa
|
diff --git a/pyaxiom/netcdf/sensors/timeseries.py b/pyaxiom/netcdf/sensors/timeseries.py
index <HASH>..<HASH> 100644
--- a/pyaxiom/netcdf/sensors/timeseries.py
+++ b/pyaxiom/netcdf/sensors/timeseries.py
@@ -265,7 +265,7 @@ class TimeSeries(object):
# Create metadata variable for the sensor_depth
if nc.variables.get('sensor_depth') is None:
logger.info("Setting the special case 'sensor_depth' metadata variable")
- inst_depth = nc.createVariable('sensor_depth', 'f4')
+ inst_depth = nc.createVariable('sensor_depth', get_type(verticals))
inst_depth.units = 'm'
inst_depth.standard_name = 'surface_altitude'
inst_depth.positive = self.vertical_positive
|
Create the height variable as the correct type
|
axiom-data-science_pyaxiom
|
train
|
py
|
767158640ecbb4f675023c68c2bf495f92aa936e
|
diff --git a/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java b/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java
index <HASH>..<HASH> 100644
--- a/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java
+++ b/wayback-core/src/main/java/org/archive/wayback/webapp/LiveWebAccessPoint.java
@@ -87,13 +87,11 @@ public class LiveWebAccessPoint extends LiveWebRequestHandler {
urlString = UrlOperations.fixupHTTPUrlWithOneSlash(urlString);
boolean handled = true;
- if (requireReferrer != null) {
- String ref = httpRequest.getHeader("Referer");
- if ((ref == null) || !ref.contains(requireReferrer)) {
- httpResponse.sendRedirect(inner.getReplayPrefix() + urlString);
- return true;
- }
- }
+ String ref = httpRequest.getHeader("Referer");
+ if ((ref == null) || !skipHost.matcher(ref).find()) {
+ httpResponse.sendRedirect(inner.getReplayPrefix() + urlString);
+ return true;
+ }
WaybackRequest wbRequest = new WaybackRequest();
wbRequest.setAccessPoint(inner);
|
FIX: Liveweb, use same host as skip host for referer filtering
|
iipc_openwayback
|
train
|
java
|
5e7e15a8b5c070bf9b629abf42f38069aabb5c14
|
diff --git a/lib/simple_worker/service.rb b/lib/simple_worker/service.rb
index <HASH>..<HASH> 100644
--- a/lib/simple_worker/service.rb
+++ b/lib/simple_worker/service.rb
@@ -20,7 +20,7 @@ module SimpleWorker
puts "Uploading #{class_name}"
# check whether it should upload again
tmp = Dir.tmpdir()
- md5file = "simple_worker_#{class_name.gsub("::", ".")}.md5"
+ md5file = "simple_worker_#{class_name.gsub("::", ".")}_#{access_key[0,8]}.md5"
existing_md5 = nil
f = File.join(tmp, md5file)
if File.exists?(f)
|
Added part of access_key to md5 to separate same classes in different sw projects.
|
iron-io_iron_worker_ruby
|
train
|
rb
|
7a6926874139661467f05eaff255a49391caf770
|
diff --git a/src/main/java/edu/jhu/nlp/depparse/PosTagDistancePruner.java b/src/main/java/edu/jhu/nlp/depparse/PosTagDistancePruner.java
index <HASH>..<HASH> 100644
--- a/src/main/java/edu/jhu/nlp/depparse/PosTagDistancePruner.java
+++ b/src/main/java/edu/jhu/nlp/depparse/PosTagDistancePruner.java
@@ -126,6 +126,11 @@ public class PosTagDistancePruner implements Trainable, Annotator, Serializable
}
}
}
+ // Always keep a right-branching tree, so that we never prune all trees.
+ for (int c = 0; c < tags.length; c++) {
+ mask.setIsPruned(c-1, c, false);
+ }
+
// Check that there still exists some singly-rooted spanning tree that wasn't pruned.
if (InsideOutsideDepParse.singleRoot && !mask.allowsSingleRootTrees()) {
log.warn("All single-root trees pruned");
|
Always keeping a right branching tree in the POS tag distance pruner
|
mgormley_pacaya
|
train
|
java
|
63abb8bf7b1a6be528a7c76c4d7c9da27c476107
|
diff --git a/lib/starting_blocks.rb b/lib/starting_blocks.rb
index <HASH>..<HASH> 100644
--- a/lib/starting_blocks.rb
+++ b/lib/starting_blocks.rb
@@ -52,7 +52,7 @@ module StartingBlocks
def default_actions
{
- execute: -> { raise 'here you go' },
+ execute: -> { puts ARGV[ARGV.index('execute') + 1] },
watch: -> do
listener = StartingBlocks::Watcher.start_watching Dir, StartingBlocks.options
StartingBlocks.display "Going to sleep, waiting for changes"
|
Spit out the thing next after the execute command.
|
darrencauthon_starting_blocks
|
train
|
rb
|
79ebc6a5fddc652903e438e60cbf74b6b3c8d09b
|
diff --git a/lib/ohai/plugins/windows/filesystem.rb b/lib/ohai/plugins/windows/filesystem.rb
index <HASH>..<HASH> 100644
--- a/lib/ohai/plugins/windows/filesystem.rb
+++ b/lib/ohai/plugins/windows/filesystem.rb
@@ -24,7 +24,7 @@ Ohai.plugin(:Filesystem) do
# @see https://docs.microsoft.com/en-us/windows/desktop/SecProv/getconversionstatus-win32-encryptablevolume#parameters
#
CONVERSION_STATUS = %w{FullyDecrypted FullyEncrypted EncryptionInProgress
- DecryptionInProgress EncryptionPaused DecryptionPaused}.freeze
+ DecryptionInProgress EncryptionPaused DecryptionPaused}.freeze unless defined?(CONVERSION_STATUS)
# Returns a Mash loaded with logical details
#
|
Avoid constant warnings in windows/filesystem plugin
This is very noisy in DK specs.
|
chef_ohai
|
train
|
rb
|
4943813add9cf7d2929cdc239128a34dae3339e2
|
diff --git a/lib/Thelia/Form/SeoFieldsTrait.php b/lib/Thelia/Form/SeoFieldsTrait.php
index <HASH>..<HASH> 100644
--- a/lib/Thelia/Form/SeoFieldsTrait.php
+++ b/lib/Thelia/Form/SeoFieldsTrait.php
@@ -44,13 +44,11 @@ trait SeoFieldsTrait
if (! in_array('meta_title', $exclude))
$this->formBuilder
->add('meta_title', 'text', array(
- 'constraints' => array(
- new NotBlank()
- ),
'label' => Translator::getInstance()->trans('Page Title'),
'label_attr' => array(
'for' => 'meta_title'
- )
+ ),
+ 'required' => false
)
);
|
seo title field is not mandatory anymore
|
thelia_core
|
train
|
php
|
804a3a3a6684ddbde881d416e1f28e144a2aa58f
|
diff --git a/lib/hirefire/macro/bunny.rb b/lib/hirefire/macro/bunny.rb
index <HASH>..<HASH> 100644
--- a/lib/hirefire/macro/bunny.rb
+++ b/lib/hirefire/macro/bunny.rb
@@ -70,9 +70,9 @@ module HireFire
def count_messages(channel, queue_names, options)
queue_names.inject(0) do |sum, queue_name|
- if options.key?("x-max-priority")
+ if options.key?(:'x-max-priority')
queue = channel.queue(queue_name, :durable => options[:durable],
- :arguments => {"x-max-priority" => options["x-max-priority"]})
+ :arguments => {"x-max-priority" => options[:'x-max-priority']})
else
queue = channel.queue(queue_name, :durable => options[:durable])
end
|
Use symbol to stay consistent with other options
|
hirefire_hirefire-resource
|
train
|
rb
|
b6c4a4943786b3341b5808a54173e1d344a894c5
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100755
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -45,7 +45,7 @@ gulp.task('browser-sync', function() {
server: {
baseDir: __dirname,
index: 'index.html',
- directory: false
+ directory: true
},
files: [],
watchOptions: {
|
fix(gulpfile.js): Allow directory listing in test server.
|
OnsenUI_OnsenUI
|
train
|
js
|