hash | diff | message | project | split | diff_languages
---|---|---|---|---|---|
5372fe30b09849301d6405f0633810814ac3dce7
|
diff --git a/version.py b/version.py
index <HASH>..<HASH> 100644
--- a/version.py
+++ b/version.py
@@ -42,8 +42,7 @@ def call_git_describe():
stdout=PIPE, stderr=PIPE)
p.stderr.close()
line = p.stdout.readlines()[0]
- return line.strip().decode('utf-8')
-
+ return line.strip().decode('utf-8').split('-')[0]
except:
return None
|
Small fix to the version file to allow it to work on Windows
|
tomturner_django-tenants
|
train
|
py
|
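A minimal sketch (not part of the repository) of what the added `.split('-')[0]` does, assuming a typical `git describe --tags` output; the sample byte string below is hypothetical:

```python
# Hypothetical `git describe` output: tag, commits since tag, abbreviated hash.
line = b"1.2.3-4-gabcdef0\n"
version = line.strip().decode('utf-8').split('-')[0]
print(version)  # -> "1.2.3" (only the tag portion is kept)
```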
6b14e10624d07d1f0097c60015c584c18ee72b02
|
diff --git a/lib/nodes/lib.js b/lib/nodes/lib.js
index <HASH>..<HASH> 100644
--- a/lib/nodes/lib.js
+++ b/lib/nodes/lib.js
@@ -65,7 +65,8 @@ registry.decl(LibraryNodeName, Node, /** @lends LibraryNode.prototype */ {
var package = PATH.join(this.getPath(), pkg),
opts = { cwd: PATH.dirname(package) },
- npm = process.env.NPM || 'npm';
+ npm = process.env.NPM || 'npm',
+ npmEnv = process.env.NPM_ENV || 'production';
return QFS.exists(package)
.then(function(exists) {
@@ -79,7 +80,7 @@ registry.decl(LibraryNodeName, Node, /** @lends LibraryNode.prototype */ {
if (!_this.ctx.force) {
LOGGER.finfo('Installing dependencies for %s library (npm install)', _this.target);
- var cmd = npm + ' install';
+ var cmd = npm + ' install --' + npmEnv;
_this.log(cmd);
return U.exec(cmd, opts);
|
LibraryNode: Install production dependencies by default (Close #<I>)
|
bem-archive_bem-tools
|
train
|
js
|
136ef2a9b76e78829fb585b77409738f1ece6167
|
diff --git a/google-images-download.py b/google-images-download.py
index <HASH>..<HASH> 100644
--- a/google-images-download.py
+++ b/google-images-download.py
@@ -8,7 +8,6 @@ import time # Importing the time library to check the time of code execution
import sys # Importing the System Library
import os
import argparse
-import urllib2
import ssl
# Taking command line arguments from users
|
Update google-images-download.py
removing redundant import of urllib2, as it does not exist in Python 3
|
hardikvasa_google-images-download
|
train
|
py
|
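The dropped import above was redundant because nothing in the script used it. For code that genuinely needs urllib2 functionality on both Python 2 and 3, a common bridge looks roughly like this (illustrative only, not part of google-images-download.py):

```python
try:
    import urllib2                    # Python 2
except ImportError:
    # Python 3: the module was folded into urllib.request
    import urllib.request as urllib2
```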
9a0991d6e2af3fa264fc3d001a63927678762358
|
diff --git a/skitai/skitaid/bin/skitaid-cron.py b/skitai/skitaid/bin/skitaid-cron.py
index <HASH>..<HASH> 100644
--- a/skitai/skitaid/bin/skitaid-cron.py
+++ b/skitai/skitaid/bin/skitaid-cron.py
@@ -17,6 +17,10 @@ def hTERM (signum, frame):
global EXIT_CODE
EXIT_CODE = 0
+def hHUP (signum, frame):
+ global EXIT_CODE
+ EXIT_CODE = 3
+
class CronManager:
def __init__ (self, config, logpath, varpath, consol):
@@ -181,6 +185,7 @@ class CronManager:
signal.signal(signal.SIGTERM, hTERM)
signal.signal(signal.SIGQUIT, hTERM)
+ signal.signal(signal.SIGHUP, hHUP)
def execute (self, cmd):
self.logger ("[info] job starting: %s" % cmd)
@@ -211,8 +216,8 @@ class CronManager:
for i in range (60):
if os.name == "nt" and i % 10 == 0:
self.maintern_shutdown_request (now)
- if EXIT_CODE is not None:
- break
+ if EXIT_CODE is not None:
+ break
time.sleep (1)
|
<I> - cron added
|
hansroh_skitai
|
train
|
py
|
d5bb5976b89217082b7c4149fac3cf87b45e4869
|
diff --git a/elasticsearch/__init__.py b/elasticsearch/__init__.py
index <HASH>..<HASH> 100644
--- a/elasticsearch/__init__.py
+++ b/elasticsearch/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import absolute_import
+
+from elasticsearch.client import Elasticsearch
+from elasticsearch.transport import Transport
+from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
+ RoundRobinSelector
+from elasticsearch.serializer import JSONSerializer
+from elasticsearch.connection import Connection, RequestsHttpConnection
+from elasticsearch.exceptions import *
+
diff --git a/elasticsearch/exceptions.py b/elasticsearch/exceptions.py
index <HASH>..<HASH> 100644
--- a/elasticsearch/exceptions.py
+++ b/elasticsearch/exceptions.py
@@ -1,3 +1,5 @@
+__all__ = ['ElastiSearchException', 'SerializationError', 'TransportError', 'NotFoundError']
+
class ElastiSearchException(Exception):
pass
|
Expose everything on the elasticsearch package itself
|
elastic_elasticsearch-py
|
train
|
py,py
|
4521cd5ee579be973cd441391f7b6090711bd0bf
|
diff --git a/src/Admin/Updater.php b/src/Admin/Updater.php
index <HASH>..<HASH> 100755
--- a/src/Admin/Updater.php
+++ b/src/Admin/Updater.php
@@ -99,12 +99,13 @@ class Updater
return 0;
}
+ // Have DB already?
+ new MigrationEntityRepository;
+
// Check we have DB structure and any migration applied
$migrated_files = [];
if (SQL::getTables()) {
- // Have DB already?
- $migrations = new MigrationEntityRepository();
- $migrated_files = $migrations->getPairs('filename', 'filename');
+ $migrated_files = MigrationEntityRepository::getInstance()->getPairs('filename', 'filename');
}
$existing_files = FileSystem::scanDirs(DIR_MIGRATIONS);
|
Auto-create table before first migration to prevent error
|
devp-eu_tmcms-core
|
train
|
php
|
69acf6f5448a4ebd8d201bae2b501c02d92dd0b7
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -213,7 +213,12 @@ setup(
packages=find_packages(exclude=['test', 'test.*']),
install_requires=['colorama', 'enum-compat', 'packaging', 'six'],
- extras_require={'msbuild': ['lxml']},
+ extras_require={
+ 'msbuild': ['lxml'],
+ 'lint': ['flake8'],
+ 'doc': ['mkdocs', 'mkdocs-bootswatch'],
+ 'deploy': ['pypandoc'],
+ },
entry_points={
'console_scripts': [
|
Add some extras to setup.py to make setting up a dev environment easier
|
jimporter_bfg9000
|
train
|
py
|
d66433a8e827c97a07b857fa54aabb7c3d20f75f
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -34,8 +34,8 @@ end
def read_bitstamp_yaml
Bitstamp.setup do |config|
- yaml = YAML::load( File.read('bit_stamp_details.yml') )
- config.key = yaml['bitstamp']['id']
- config.secret = yaml['bitstamp']['password']
+ raise "You must set environment variable BITSTAMP_KEY and BITSTAMP_SECRET with your username and password to run specs." if ENV['BITSTAMP_KEY'].nil? or ENV['BITSTAMP_SECRET'].nil?
+ config.key = ENV['BITSTAMP_KEY']
+ config.secret = ENV['BITSTAMP_SECRET']
end
end
|
Uses environment variables instead of bit_stamp_details.yml in specs
|
kojnapp_bitstamp
|
train
|
rb
|
2a0462ab387dcbdfc464e899c4800e0d61a9d739
|
diff --git a/parboiled-core/src/main/java/org/parboiled/parserunners/ReportingParseRunner.java b/parboiled-core/src/main/java/org/parboiled/parserunners/ReportingParseRunner.java
index <HASH>..<HASH> 100644
--- a/parboiled-core/src/main/java/org/parboiled/parserunners/ReportingParseRunner.java
+++ b/parboiled-core/src/main/java/org/parboiled/parserunners/ReportingParseRunner.java
@@ -70,7 +70,7 @@ public class ReportingParseRunner<V> extends AbstractParseRunner<V> {
resetValueStack();
result = runLocatingMatch(inputBuffer);
Preconditions.checkState(!result.matched); // we failed before so we should really be failing again
- Preconditions.checkState(result.parseErrors.size() == 1);
+ Preconditions.checkState(result.parseErrors.size() >= 1); // may be more than one in case of custom ActionExceptions
// finally perform a third, reporting run (now that we know the error location)
resetValueStack();
|
core: fix incorrect assertion in ReportingParseRunner
|
sirthias_parboiled
|
train
|
java
|
0f169493a2614328986f6f4a8c2c7a1943223747
|
diff --git a/activerecord/lib/active_record/identity_map.rb b/activerecord/lib/active_record/identity_map.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/identity_map.rb
+++ b/activerecord/lib/active_record/identity_map.rb
@@ -29,15 +29,15 @@ module ActiveRecord
end
def get(class_name, primary_key)
- current[[class_name, primary_key]]
+ current[[class_name, primary_key.to_s]]
end
def add(record)
- current[[record.class.name, record.id]] = record
+ current[[record.class.name, record.id.to_s]] = record
end
def remove(record)
- current.delete([record.class.name, record.id])
+ current.delete([record.class.name, record.id.to_s])
end
def clear
|
Use string representations of primary keys in identity map keys to avoid casting problems and to support models with string primary keys.
|
rails_rails
|
train
|
rb
|
f1b47a0f480186957a08cdffac6c2115423a3887
|
diff --git a/test/test_generic_spreadsheet.rb b/test/test_generic_spreadsheet.rb
index <HASH>..<HASH> 100644
--- a/test/test_generic_spreadsheet.rb
+++ b/test/test_generic_spreadsheet.rb
@@ -50,6 +50,24 @@ class TestGenericSpreadsheet < Test::Unit::TestCase
end
end
+ context "Roo::GenericSpreadsheet.number_to_letter" do
+ should "return 'A' when passed 1" do
+ assert_equal 'A',Roo::GenericSpreadsheet.number_to_letter(1)
+ end
+
+ should "return 'Z' when passed 26" do
+ assert_equal 'Z',Roo::GenericSpreadsheet.number_to_letter(26)
+ end
+
+ should "return 'AA' when passed 27" do
+ assert_equal 'AA',Roo::GenericSpreadsheet.number_to_letter(27)
+ end
+
+ should "return the correct letter when passed a Float" do
+ assert_equal 'A',Roo::GenericSpreadsheet.number_to_letter(1.0)
+ end
+ end
+
def test_setting_invalid_type_does_not_update_cell
@oo.set(1,1,1)
assert_raise(ArgumentError){@oo.set(1,1, :invalid_type)}
|
add tests for number_to_letter
|
roo-rb_roo
|
train
|
rb
|
fbb1b81a5184b21ce3f0fb62a148ee1082b7338b
|
diff --git a/xone/__init__.py b/xone/__init__.py
index <HASH>..<HASH> 100644
--- a/xone/__init__.py
+++ b/xone/__init__.py
@@ -1,6 +1,6 @@
"""Frequently used functions for financial data analysis"""
-__version__ = '0.0.8'
+__version__ = '0.0.9'
__submodules__ = [
'utils',
|
add <I> support for pypi
|
alpha-xone_xone
|
train
|
py
|
c91ed284da579137f46ad870548c7f06940361cc
|
diff --git a/pyipma/forecast.py b/pyipma/forecast.py
index <HASH>..<HASH> 100644
--- a/pyipma/forecast.py
+++ b/pyipma/forecast.py
@@ -101,5 +101,8 @@ class Forecast:
return self._weather_type_description[self.weather_type].descIdWeatherTypePT
def __repr__(self):
+ if self.humidity is None:
+ return f"Forecast for {self._global_id_local} at {self._time}: \
+ {self.temperature}°C, {self.weather_type_description}"
return f"Forecast for {self._global_id_local} at {self._time}: \
{self.temperature}°C, {self.humidity}%, {self.weather_type_description}"
|
don't print humidity when there is no humidity
|
dgomes_pyipma
|
train
|
py
|
3ec4a48484cd85afde3edb4ea30742960dd88df3
|
diff --git a/DependencyInjection/Configuration.php b/DependencyInjection/Configuration.php
index <HASH>..<HASH> 100644
--- a/DependencyInjection/Configuration.php
+++ b/DependencyInjection/Configuration.php
@@ -106,7 +106,7 @@ class Configuration
->scalarNode('update')->defaultTrue()->end()
->scalarNode('delete')->defaultTrue()->end()
->scalarNode('service')->end()
- ->scalarNode('is_indexable_callback')->defaultNull()->end()
+ ->variableNode('is_indexable_callback')->defaultNull()->end()
->end()
->end()
->arrayNode('finder')
@@ -184,7 +184,7 @@ class Configuration
->scalarNode('update')->defaultTrue()->end()
->scalarNode('delete')->defaultTrue()->end()
->scalarNode('service')->end()
- ->scalarNode('is_indexable_callback')->defaultNull()->end()
+ ->variableNode('is_indexable_callback')->defaultNull()->end()
->end()
->end()
->arrayNode('finder')
|
Allow variable types for the is_indexable_callback option
Actual checking (string, array callback or Closure) is done in the Listener class itself.
|
FriendsOfSymfony_FOSElasticaBundle
|
train
|
php
|
9526fd9c24e248f3339e3d6c9f42b4d927eb63c8
|
diff --git a/spec/specHelpers/factory.js b/spec/specHelpers/factory.js
index <HASH>..<HASH> 100644
--- a/spec/specHelpers/factory.js
+++ b/spec/specHelpers/factory.js
@@ -55,7 +55,7 @@ FactoryProto.prototype.buildMockResearcher = function( expectedValue, multiValue
},
/**
- * Return research data if available.
+ * Check whether morphology data is available.
*
* @returns {boolean} True if the researcher has access to morphology data.
*/
|
Update spec/specHelpers/factory.js
|
Yoast_YoastSEO.js
|
train
|
js
|
ee07b0e045153928f3c0e95664580d1cf4859064
|
diff --git a/src/Element/Identify.php b/src/Element/Identify.php
index <HASH>..<HASH> 100644
--- a/src/Element/Identify.php
+++ b/src/Element/Identify.php
@@ -8,6 +8,11 @@ trait Identify
{
array_unshift($this->prefix, $prefix);
}
+
+ public function prefixId($prefix)
+ {
+ $this->prefixId = $prefix;
+ }
public function name()
{
@@ -25,6 +30,9 @@ trait Identify
if ($this->prefix) {
$id = implode('-', $this->prefix)."-$id";
}
+ if ($this->prefixId) {
+ $id = $this->prefixId.'-'.$id;
+ }
$id = preg_replace('/[\W]+/', '-', $id);
return trim(preg_replace('/[-]+/', '-', $id), '-');
}
|
this needs a special method; the prefix for the id should NOT show up in names
|
monolyth-php_formulaic
|
train
|
php
|
fa2dd2132d4b35a15d9d7f24f1d830c32b590bf3
|
diff --git a/lib/app/plugins.js b/lib/app/plugins.js
index <HASH>..<HASH> 100644
--- a/lib/app/plugins.js
+++ b/lib/app/plugins.js
@@ -32,11 +32,13 @@ module.exports.loadFeatures = function (config) {
);
}
+ var featureTasks = _.get(feature, 'tasks', []);
+
return {
featureId: featureId,
featureDef: featureDef,
featureConstructor : feature,
- tasks: []
+ tasks: featureTasks
};
});
};
diff --git a/lib/build/build-helpers.js b/lib/build/build-helpers.js
index <HASH>..<HASH> 100644
--- a/lib/build/build-helpers.js
+++ b/lib/build/build-helpers.js
@@ -82,8 +82,7 @@
// add tasks declared by features to build
var features = plugins.loadFeatures(configuration);
_.each(features, function(featureDescription) {
- var featureTasks = _.get(featureDescription, 'featureConstructor.tasks', []);
- _.each(featureTasks, function (task) {
+ _.each(featureDescription.tasks, function (task) {
// wrap task runner function to closure which passes required parameters to call, which takes only global configuration as parameter and pass feature config implicitly
tasks.push({
name: task.name,
|
Moved code which figures out where to look for tasks to a better place
|
Vincit_dodo.js
|
train
|
js,js
|
47c61c386abee46b36f0ed78b75691dac9def9b3
|
diff --git a/better_exchook.py b/better_exchook.py
index <HASH>..<HASH> 100644
--- a/better_exchook.py
+++ b/better_exchook.py
@@ -169,6 +169,8 @@ def better_exchook(etype, value, tb):
output(' ' + ".".join(token) + " = " + tokenvalue)
alreadyPrintedLocals.add(token)
if len(alreadyPrintedLocals) == 0: output(" no locals")
+ else:
+ output(' -- code not available --')
_tb = _tb.tb_next
n += 1
|
print a small error when the code is not available
|
albertz_py_better_exchook
|
train
|
py
|
c158f9ebfc63c234271086f372e3762b2f6f62b4
|
diff --git a/core/frontend/src/cards/js/video.js b/core/frontend/src/cards/js/video.js
index <HASH>..<HASH> 100644
--- a/core/frontend/src/cards/js/video.js
+++ b/core/frontend/src/cards/js/video.js
@@ -14,7 +14,6 @@
const currentTimeContainer = videoElementContainer.querySelector('.kg-video-current-time');
const largePlayIcon = videoElementContainer.querySelector('.kg-video-large-play-icon');
const videoOverlay = videoElementContainer.querySelector('.kg-video-overlay');
- let hasStartedPlayback = false;
let playbackRates = [{
rate: 0.75,
label: '0.7×'
@@ -111,7 +110,7 @@
videoElementContainer.onmouseleave = () => {
const isPlaying = !!(videoEl.currentTime > 0 && !videoEl.paused && !videoEl.ended && videoEl.readyState > 2);
- if (isPlaying || hasStartedPlayback) {
+ if (isPlaying) {
videoPlayerContainer.classList.add("kg-video-hide-animated");
}
}
@@ -128,7 +127,6 @@
});
videoEl.onplay = () => {
- hasStartedPlayback = true;
largePlayIcon.classList.add("kg-video-hide-animated");
videoOverlay.classList.add("kg-video-hide-animated");
playIconContainer.classList.add("kg-video-hide");
|
Refined video controls hidden behavior
no refs
This reverts commit <I>fafab<I>effa<I>c<I>f<I>f.
|
TryGhost_Ghost
|
train
|
js
|
3a6eafff1e3f12b14a80b327f76c65a136d812ba
|
diff --git a/salt/minion.py b/salt/minion.py
index <HASH>..<HASH> 100644
--- a/salt/minion.py
+++ b/salt/minion.py
@@ -1638,7 +1638,6 @@ class Minion(MinionBase):
else:
ret['return'] = return_data
-
retcode = minion_instance.functions.pack['__context__'].get(
'retcode',
0
|
Remove extra blank line to appease pylint
|
saltstack_salt
|
train
|
py
|
ec2afcee059dcb4322cfb9144d797baeae1107ff
|
diff --git a/test/archiver.js b/test/archiver.js
index <HASH>..<HASH> 100644
--- a/test/archiver.js
+++ b/test/archiver.js
@@ -13,7 +13,6 @@ var WriteHashStream = helpers.WriteHashStream;
var binaryBuffer = helpers.binaryBuffer;
var archiver = require('../');
-var ArchiverCore = require('./lib/core');
var testBuffer = binaryBuffer(1024 * 16);
|
tests: restructure a bit.
|
archiverjs_node-archiver
|
train
|
js
|
2c577466c35d4925ec966cef4d695f52206fe322
|
diff --git a/tests/test_gnupg.py b/tests/test_gnupg.py
index <HASH>..<HASH> 100644
--- a/tests/test_gnupg.py
+++ b/tests/test_gnupg.py
@@ -675,9 +675,9 @@ analysis of different kinds of data (temperature, humidity, etc.) coming from
a WSN while ensuring both end-to-end encryption and hop-by-hop
authentication."""
encrypted = str(gpg.encrypt(message, dijk))
+ log.debug("Plaintext: %s" % message)
+ log.debug("Encrypted: %s" % encrypted)
self.assertNotEqual(message, encrypted)
- self.assertNotEqual(encrypted, '')
- self.assertGreater(len(encrypted), 0)
def test_encryption_alt_encoding(self):
"""Test encryption with latin-1 encoding"""
|
Add two helpful debug statements, remove two superfluous asserts from unittest.
|
isislovecruft_python-gnupg
|
train
|
py
|
82c36da2877f80294599ba0e4f42c36dd4aa2872
|
diff --git a/generators/gae/index.js b/generators/gae/index.js
index <HASH>..<HASH> 100644
--- a/generators/gae/index.js
+++ b/generators/gae/index.js
@@ -770,7 +770,12 @@ module.exports = class extends BaseGenerator {
return {
productionBuild() {
if (this.abort) return;
-
+ // Until issue; https://github.com/GoogleCloudPlatform/app-gradle-plugin/issues/376 is fixed we shall disable .gcloudignore
+ this.log(
+ chalk.bold(
+ 'Due to a Bug in GCloud SDK you will need to disable the generation of .gcloudignore file before deploying using: "gcloud config set gcloudignore/enabled false". For more info refer: https://github.com/GoogleCloudPlatform/app-gradle-plugin/issues/376'
+ )
+ );
if (this.buildTool === 'maven') {
this.log(chalk.bold('Deploy to App Engine: ./mvnw package appengine:deploy -DskipTests -Pgae,prod,prod-gae'));
} else if (this.buildTool === 'gradle') {
|
fix: add info to disable .gcloudignore file generation until GCloud SDK bug is fixed
This gives users the information needed to disable .gcloudignore file generation until <URL> is fixed
|
jhipster_generator-jhipster
|
train
|
js
|
3388e87d87f8ff859070a153d05b6c23b27c88a5
|
diff --git a/seleniumbase/fixtures/base_case.py b/seleniumbase/fixtures/base_case.py
index <HASH>..<HASH> 100755
--- a/seleniumbase/fixtures/base_case.py
+++ b/seleniumbase/fixtures/base_case.py
@@ -420,11 +420,15 @@ class BaseCase(unittest.TestCase):
def go_back(self):
self.__last_page_load_url = None
self.driver.back()
+ if self.browser == "safari":
+ self.driver.refresh()
self.wait_for_ready_state_complete()
def go_forward(self):
self.__last_page_load_url = None
self.driver.forward()
+ if self.browser == "safari":
+ self.driver.refresh()
self.wait_for_ready_state_complete()
def is_element_present(self, selector, by=By.CSS_SELECTOR):
|
Fix a Safari bug with forward and backward page navigation
|
seleniumbase_SeleniumBase
|
train
|
py
|
822ee86aebc3bc9ae58442676be1567276675a01
|
diff --git a/owo/bg.py b/owo/bg.py
index <HASH>..<HASH> 100644
--- a/owo/bg.py
+++ b/owo/bg.py
@@ -6,6 +6,8 @@ main use intended for mobile devices
usage: `$ owo-bg -p path -k API_KEY`
"""
+from __future__ import print_function
+
import argparse
import owo
import os
diff --git a/owo/cli.py b/owo/cli.py
index <HASH>..<HASH> 100644
--- a/owo/cli.py
+++ b/owo/cli.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3.5
+from __future__ import print_function
+
import argparse
import owo
|
Add py2 support for `owo-bg` and `owo-cli`
|
whats-this_owo.py
|
train
|
py,py
|
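A short illustration of why the `from __future__ import print_function` lines above give the two scripts Python 2 support; the sample call is hypothetical:

```python
from __future__ import print_function

# On Python 2 *without* the import, print is a statement, so
# print("owo", "bg") would emit the tuple ('owo', 'bg').
# With the import (and on Python 3) it is a function call:
print("owo", "bg")  # -> owo bg
```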
9f63315678ee7822d14ba8c578dbf02aefcd90bb
|
diff --git a/core/src/main/java/tech/tablesaw/io/TypeUtils.java b/core/src/main/java/tech/tablesaw/io/TypeUtils.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/tech/tablesaw/io/TypeUtils.java
+++ b/core/src/main/java/tech/tablesaw/io/TypeUtils.java
@@ -110,6 +110,16 @@ public final class TypeUtils {
DateTimeFormatter.ofPattern("M/d/yy H:mm");
private static final DateTimeFormatter dtTimef8 =
DateTimeFormatter.ofPattern("M/d/yyyy h:mm:ss a");
+
+ static {
+ dtTimef6 = new DateTimeFormatterBuilder()
+ .parseCaseInsensitive()
+ .append(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
+ .appendLiteral('.')
+ .appendPattern("SSS")
+ .toFormatter();
+ }
+
// A formatter that handles date time formats defined above
public static final DateTimeFormatter DATE_TIME_FORMATTER =
new DateTimeFormatterBuilder()
@@ -215,15 +225,6 @@ public final class TypeUtils {
//, timef7
);
- static {
- dtTimef6 = new DateTimeFormatterBuilder()
- .parseCaseInsensitive()
- .append(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
- .appendLiteral('.')
- .appendPattern("SSS")
- .toFormatter();
- }
-
/**
* Private constructor to prevent instantiation
*/
|
fixed core/src/main/java/tech/tablesaw/io/TypeUtils.java, which broke during reformatting
|
jtablesaw_tablesaw
|
train
|
java
|
8af7aacd7a75c34e88b9a66176bc69982ee85b72
|
diff --git a/p2p/host/autonat/addr.go b/p2p/host/autonat/addr.go
index <HASH>..<HASH> 100644
--- a/p2p/host/autonat/addr.go
+++ b/p2p/host/autonat/addr.go
@@ -8,14 +8,22 @@ import (
var private4, private6 []*net.IPNet
var privateCIDR4 = []string{
+ // localhost
+ "127.0.0.0/8",
+ // private networks
"10.0.0.0/8",
+ "100.64.0.0/10",
"172.16.0.0/12",
"192.168.0.0/16",
- "100.64.0.0/10",
+ // link local
"169.254.0.0/16",
}
var privateCIDR6 = []string{
+ // localhost
+ "::1/128",
+ // ULA reserved
"fc00::/7",
+ // link local
"fe80::/10",
}
|
add localhost to private addr ranges
|
libp2p_go-libp2p
|
train
|
go
|
63cc61de1396974b4a8f48af393897bc93817734
|
diff --git a/ffpyplayer/__init__.py b/ffpyplayer/__init__.py
index <HASH>..<HASH> 100644
--- a/ffpyplayer/__init__.py
+++ b/ffpyplayer/__init__.py
@@ -9,7 +9,7 @@ import platform
__all__ = ('dep_bins', )
-__version__ = '4.3.4'
+__version__ = '4.3.5.dev0'
version = __version__
# the ffmpeg src git version tested and upto date with,
|
Bump to <I>.dev0.
|
matham_ffpyplayer
|
train
|
py
|
acdedd3a1c0e9a6aaf14290c1928d1968cf71dc6
|
diff --git a/examples/get_secure_user_falco_rules.py b/examples/get_secure_user_falco_rules.py
index <HASH>..<HASH> 100755
--- a/examples/get_secure_user_falco_rules.py
+++ b/examples/get_secure_user_falco_rules.py
@@ -21,7 +21,7 @@ sdc_token = sys.argv[1]
#
# Instantiate the SDC client
#
-sdclient = SdSecureClient(sdc_token, 'https://secure-staging.sysdig.com')
+sdclient = SdSecureClient(sdc_token, 'https://secure.sysdig.com')
#
# Get the configuration
|
Use normal, not staging, environment.
Initialize the client against our normal secure host, not our staging host.
|
draios_python-sdc-client
|
train
|
py
|
07c80b8c3777b506afe95c509d787d333ed06dc1
|
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,8 +1,5 @@
require 'rubygems'
require 'spec'
-require 'net-http-spy'
-
-Net::HTTP.http_logger_options = {:verbose => true}
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
|
remove net-http-spy from specs
|
rlivsey_rspreedly
|
train
|
rb
|
f519093db1ab23a93cec513fc82d9041660118e7
|
diff --git a/src/sap.ui.core/src/sap/ui/core/Configuration.js b/src/sap.ui.core/src/sap/ui/core/Configuration.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.core/src/sap/ui/core/Configuration.js
+++ b/src/sap.ui.core/src/sap/ui/core/Configuration.js
@@ -707,6 +707,7 @@ sap.ui.define([
* @returns {string} The configured IANA timezone ID, e.g. "America/New_York"
* @public
* @since 1.99.0
+ * @experimental As of version 1.99.0, time zones are currently not supported by all controls.
*/
getTimezone : function () {
return this.timezone;
@@ -809,6 +810,7 @@ sap.ui.define([
* @public
* @return {this} <code>this</code> to allow method chaining
* @since 1.99.0
+ * @experimental As of version 1.99.0, time zones are currently not supported by all controls.
*/
setTimezone : function (sTimezone) {
check(sTimezone == null || typeof sTimezone === 'string',
|
[INTERNAL] sap.ui.core.Configuration: Mark getTimezone/setTimezone as experimental
Time zones are currently not supported by all controls.
Change-Id: I<I>abaf<I>ca<I>d<I>b4c<I>a5c3b<I>
BCP: <I>
|
SAP_openui5
|
train
|
js
|
985f11c60a3f4dda917f76a2f4b98b5f9a126bb2
|
diff --git a/src/sap.ui.unified/test/sap/ui/unified/qunit/ColorPicker.qunit.js b/src/sap.ui.unified/test/sap/ui/unified/qunit/ColorPicker.qunit.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.unified/test/sap/ui/unified/qunit/ColorPicker.qunit.js
+++ b/src/sap.ui.unified/test/sap/ui/unified/qunit/ColorPicker.qunit.js
@@ -21,7 +21,7 @@ sap.ui.define([
QUnit.test("Responsive mode", function (oAssert) {
// Arrange
var oHelper = sap.ui.unified.ColorPickerHelper,
- oFactory = oHelper.factory,
+ oFactory = sap.ui.unified.ColorPickerHelper.factory,
oRBGroup,
oInput,
oSlider;
|
[FIX][INTERNAL] sap.ui.unified.ColorPicker: qunit tests are working now
BCP: <I>
Change-Id: I<I>f<I>b3dede<I>ce<I>aa<I>ccf4d<I>fb<I>
|
SAP_openui5
|
train
|
js
|
081782fd448d473a6c4977948856cf7cc1ab200f
|
diff --git a/spec/flipper/middleware/local_cache_spec.rb b/spec/flipper/middleware/local_cache_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/flipper/middleware/local_cache_spec.rb
+++ b/spec/flipper/middleware/local_cache_spec.rb
@@ -7,12 +7,6 @@ require 'flipper/adapters/memory'
describe Flipper::Middleware::LocalCache do
include Rack::Test::Methods
- class Enum < Struct.new(:iter)
- def each(&block)
- iter.call(&block)
- end
- end
-
let(:source) { {} }
let(:memory_adapter) { Flipper::Adapters::Memory.new(source) }
let(:adapter) { Flipper::Adapters::OperationLogger.new(memory_adapter) }
|
No need for enum in local cache middleware spec.
|
jnunemaker_flipper
|
train
|
rb
|
58ace03c3f77ab14f72762818acdde091eb12aef
|
diff --git a/rootpy/tree/tree.py b/rootpy/tree/tree.py
index <HASH>..<HASH> 100644
--- a/rootpy/tree/tree.py
+++ b/rootpy/tree/tree.py
@@ -557,6 +557,9 @@ class TreeChain(object):
self.name = name
self.__queue_mode = False
+ # For some reason, multiprocessing.queues d.n.e. until
+ # one has been created
+ dummy_queue = multiprocessing.Queue()
if isinstance(files, multiprocessing.queues.Queue):
self.__queue_mode = True
elif isinstance(files, tuple):
|
Hack fix for bug where multiprocessing.queues is undefined until a Queue() is created
|
rootpy_rootpy
|
train
|
py
|
79b8b2db9d302e660fd76aefdc0e6ff1937cddde
|
diff --git a/resputil/resputil.go b/resputil/resputil.go
index <HASH>..<HASH> 100644
--- a/resputil/resputil.go
+++ b/resputil/resputil.go
@@ -72,6 +72,9 @@ func JSON(w http.ResponseWriter, responses ...interface{}) {
switch value := response.(type) {
case nil:
continue
+ case func() (interface{}, error):
+ result, err := value()
+ JSON(w, result, err)
case func() error:
err := value()
if err == nil {
|
update with func() (interface{}, error) signature
|
titpetric_factory
|
train
|
go
|
3942fd6e4845234b827ec6d755d3f2f3ee99a9fb
|
diff --git a/benchexec/tablegenerator/__init__.py b/benchexec/tablegenerator/__init__.py
index <HASH>..<HASH> 100755
--- a/benchexec/tablegenerator/__init__.py
+++ b/benchexec/tablegenerator/__init__.py
@@ -1204,7 +1204,7 @@ def main(args=None):
if outputPath == '-':
# write to stdout
outputFilePattern = '-'
- outputPath = None
+ outputPath = '.'
else:
outputFilePattern = "{name}.{type}.{ext}"
|
Change base path for relative links in HTML tables
when writing tables to stdout with "-o -".
Now the base path is always the current directory.
|
sosy-lab_benchexec
|
train
|
py
|
79971716f50bd5f5dbd6d94a8495d2245c5b8c9a
|
diff --git a/youtube/lang/fr.js b/youtube/lang/fr.js
index <HASH>..<HASH> 100644
--- a/youtube/lang/fr.js
+++ b/youtube/lang/fr.js
@@ -20,5 +20,6 @@ CKEDITOR.plugins.setLang('youtube', 'fr', {
noHeight : 'Vous devez saisir une hauteur',
invalidHeight : 'La hauteur saisie est invalide',
invalidTime : 'Le temps de départ de la vidéo est invalide',
- txtResponsive : 'Responsive video'
+ txtResponsive : 'Responsive video',
+ txtNoEmbed : 'Vidéo image et lien seulement'
});
|
Update fr.js with txtNoEmbed translation
|
fonini_ckeditor-youtube-plugin
|
train
|
js
|
46e0692deebde2a098d3c23427222420e0749830
|
diff --git a/app/models/booking.rb b/app/models/booking.rb
index <HASH>..<HASH> 100644
--- a/app/models/booking.rb
+++ b/app/models/booking.rb
@@ -111,14 +111,7 @@ class Booking < ActiveRecord::Base
# Standard methods
def to_s(format = :default)
case format
- when :short
- "%s: %s / %s CHF %s" % [
- value_date ? value_date : '?',
- credit_account ? credit_account.code : '?',
- debit_account ? debit_account.code : '?',
- amount ? "%0.2f" % amount : '?',
- ]
- else
+ when :long
"%s: %s an %s CHF %s, %s (%s)" % [
value_date ? value_date : '?',
credit_account ? "#{credit_account.title} (#{credit_account.code})" : '?',
@@ -127,6 +120,13 @@ class Booking < ActiveRecord::Base
title.present? ? title : '?',
comments.present? ? comments : '?'
]
+ else
+ "%s: %s / %s CHF %s" % [
+ value_date ? value_date : '?',
+ credit_account ? credit_account.code : '?',
+ debit_account ? debit_account.code : '?',
+ amount ? "%0.2f" % amount : '?',
+ ]
end
end
|
Use short Booking.to_s format as default.
|
huerlisi_has_accounts
|
train
|
rb
|
71e71de5e054bcbbb01a06ed2b675c04bfb424ee
|
diff --git a/blockstack_cli_0.14.1/blockstore_client/user.py b/blockstack_cli_0.14.1/blockstore_client/user.py
index <HASH>..<HASH> 100644
--- a/blockstack_cli_0.14.1/blockstore_client/user.py
+++ b/blockstack_cli_0.14.1/blockstore_client/user.py
@@ -76,20 +76,9 @@ def parse_user( user_json ):
except Exception, e:
# not valid json
traceback.print_exc()
- print "Can't load '%s'" % user_json
+ print >> sys.stderr, "Can't load '%s'" % user_json
return None
- # verify that this is a valid user record
- valid = schema_match( USER_SCHEMA, user )
- if not valid:
-
- # could be reserved
- valid = schema_match( RESERVED_USER_SCHEMA, user )
- if not valid:
-
- print "invalid schema '%s'" % user_json
- return None
-
return user
|
Don't be strict about the zone file schema
|
blockstack_blockstack-core
|
train
|
py
|
dc48e4c315fc8288d2230da026aa33c7cbeada20
|
diff --git a/examples/slim/index.php b/examples/slim/index.php
index <HASH>..<HASH> 100644
--- a/examples/slim/index.php
+++ b/examples/slim/index.php
@@ -31,18 +31,22 @@ $app->add(
// Make some routes:
$app->get(
- '/', function () {
- echo sprintf(
- '<a href="%s/%s">%s</a>', '/slim/fail', rand(0, 999999),
- 'trigger an error'
- );
-}
+ '/',
+ function () {
+ echo sprintf(
+ '<a href="%s/%s">%s</a>',
+ '/slim/fail',
+ rand(0, 999999),
+ 'trigger an error'
+ );
+ }
);
$app->get(
- '/fail/:id', function ($id) {
- throw new Exception('bleh! ' . $id);
-}
+ '/fail/:id',
+ function ($id) {
+ throw new Exception('bleh! ' . $id);
+ }
);
// Run the app.
|
Bring slim index example to PSR-2 compliance
|
honeybadger-io_honeybadger-php
|
train
|
php
|
e60202b9a1ae592100a01340d7559c8e479be183
|
diff --git a/src/Event/View/ViewViewTabsListener.php b/src/Event/View/ViewViewTabsListener.php
index <HASH>..<HASH> 100644
--- a/src/Event/View/ViewViewTabsListener.php
+++ b/src/Event/View/ViewViewTabsListener.php
@@ -234,12 +234,6 @@ class ViewViewTabsListener implements EventListenerInterface
{
$content = [];
- // In case the application modified tab configuration,
- // merging it down and passing to the element.
- if (isset($event->result['tab']) && !empty($event->result['tab'])) {
- $options['tab'] = array_merge($options['tab'], $event->result['tab']);
- }
-
$params = $request->params;
$table = $params['controller'];
|
Moving tabs permission check into TabLists event (task #<I>)
|
QoboLtd_cakephp-csv-migrations
|
train
|
php
|
179f626089eef20288e0e641d346e3252756826b
|
diff --git a/lib/entry/index.js b/lib/entry/index.js
index <HASH>..<HASH> 100644
--- a/lib/entry/index.js
+++ b/lib/entry/index.js
@@ -794,14 +794,34 @@ exports.linkSlave = function(req, socket) { // {{{2
return;
}
- // Request is not tracking data or state, respond with `close()`.
- if (! this.kind) {
+ if (this.kind) {
+ // TODO: p1 => p2 => p3; p1 request entry data and p2 can respond old data when there is no link between p2 entry and p3 entry.
+
+ // Request is not tracking data or state, respond with `close()`.
+
+ Ose.link.close(socket, this.respond(req));
+ return;
+ }
+
+ if (this.shard.atHome()) {
Ose.link.error(socket, M.log.todo('Trying to get entry data while entry is not setup yet!', this));
return;
}
- // TODO: p1 => p2 => p3; p1 request entry data and p2 can respond old data when there is no link between p2 entry and p3 entry.
- Ose.link.close(socket, this.respond(req));
+ this.linkMaster(function(err, entry) {
+ if (err) {
+ Ose.link.error(socket, err);
+
+ if (! entry.removed) {
+ entry.remove();
+ }
+
+ return;
+ }
+
+ Ose.link.close(socket, entry.respond(req));
+ return;
+ });
return;
};
|
Bugfix: Relay entries get request problems
|
OpenSmartEnvironment_ose
|
train
|
js
|
30fedf5e92ebcef6684b5d9b121492b27b962f10
|
diff --git a/src/KGzocha/Searcher/Context/Doctrine/CachedQueryBuilderSearchingContext.php b/src/KGzocha/Searcher/Context/Doctrine/CachedQueryBuilderSearchingContext.php
index <HASH>..<HASH> 100644
--- a/src/KGzocha/Searcher/Context/Doctrine/CachedQueryBuilderSearchingContext.php
+++ b/src/KGzocha/Searcher/Context/Doctrine/CachedQueryBuilderSearchingContext.php
@@ -3,7 +3,7 @@
namespace KGzocha\Searcher\Context\Doctrine;
/**
- * Use this class as a SearchingContext in order to allow all filter imposers
+ * Use this class as a SearchingContext in order to allow all criteria builders
* to work with Doctrine's QueryBuilder, with query caching enabled by default.
*
* @author Daniel Ribeiro <[email protected]>
|
Fixed naming that will come in <I>
|
krzysztof-gzocha_searcher
|
train
|
php
|
c105f642db25e9128328911fcef89cf8cc9fd7ed
|
diff --git a/tools/scenario-player/scenario_player/tasks/blockchain.py b/tools/scenario-player/scenario_player/tasks/blockchain.py
index <HASH>..<HASH> 100644
--- a/tools/scenario-player/scenario_player/tasks/blockchain.py
+++ b/tools/scenario-player/scenario_player/tasks/blockchain.py
@@ -2,7 +2,6 @@ from typing import Any, List, Dict
import structlog
from eth_utils import to_checksum_address, decode_hex, event_abi_to_log_topic
-from raiden_contracts.constants import CONTRACT_TOKEN_NETWORK
from raiden_contracts.contract_manager import ContractManager
from scenario_player.runner import ScenarioRunner
@@ -73,10 +72,11 @@ def query_blockchain_events(
events = web3.eth.getLogs(filter_params)
+ contract_abi = contract_manager.get_contract_abi(contract_name)
return [
decode_event(
- contract_manager.get_contract_abi(contract_name),
- raw_event,
+ abi=contract_abi,
+ log_=raw_event,
)
for raw_event in events
]
@@ -114,7 +114,7 @@ class BlockchainEventFilter(Task):
web3=self.web3,
contract_manager=self._runner.contract_manager,
contract_address=self._runner.token_network_address,
- contract_name=CONTRACT_TOKEN_NETWORK,
+ contract_name=self.contract_name,
topics=[],
from_block=BlockNumber(self._runner.token_deployment_block),
to_block=BlockNumber(self.web3.eth.blockNumber),
|
Use contract name in assert_events
|
raiden-network_raiden
|
train
|
py
|
23e6a63ec0b1f659271f8965a4bd376d0a52f5d3
|
diff --git a/src/main/java/water/H2O.java b/src/main/java/water/H2O.java
index <HASH>..<HASH> 100644
--- a/src/main/java/water/H2O.java
+++ b/src/main/java/water/H2O.java
@@ -54,7 +54,7 @@ public final class H2O {
public static final PrintStream OUT = System.out;
public static final PrintStream ERR = System.err;
- static final int NUMCPUS = Runtime.getRuntime().availableProcessors();
+ public static final int NUMCPUS = Runtime.getRuntime().availableProcessors();
// Convenience error
public static final RuntimeException unimpl() { return new RuntimeException("unimplemented"); }
@@ -403,7 +403,7 @@ public final class H2O {
// A standard FJ Pool, with an expected priority level.
private static class ForkJoinPool2 extends ForkJoinPool {
public final int _priority;
- ForkJoinPool2(int p, int cap) { super(NUMCPUS,new FJWThrFact(cap),null,false); _priority = p; }
+ ForkJoinPool2(int p, int cap) { super(NUMCPUS,new FJWThrFact(cap),null,p!=MIN_PRIORITY); _priority = p; }
public H2OCountedCompleter poll() { return (H2OCountedCompleter)pollSubmission(); }
}
|
LIFO stack-like behavior for the normal FJ queue, FIFO fairness for high-priority queues
|
h2oai_h2o-2
|
train
|
java
|
05f3c54c30a692d62bf263f4d3d5e56d82f1186b
|
diff --git a/keanu-project/src/main/java/io/improbable/keanu/vertices/dbl/KeanuRandom.java b/keanu-project/src/main/java/io/improbable/keanu/vertices/dbl/KeanuRandom.java
index <HASH>..<HASH> 100644
--- a/keanu-project/src/main/java/io/improbable/keanu/vertices/dbl/KeanuRandom.java
+++ b/keanu-project/src/main/java/io/improbable/keanu/vertices/dbl/KeanuRandom.java
@@ -28,7 +28,12 @@ public class KeanuRandom {
* thread that does the load. This causes issues with Apache Math that makes use of Sub-normal values (in
* particular to initialisation values for the BrentOptimizer).
*/
- Thread nd4jInitThread = new Thread(() -> Nd4j.scalar(1.0));
+ Thread nd4jInitThread = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ DoubleTensor a = DoubleTensor.create(1.0, 1.0);
+ }
+ });
nd4jInitThread.start();
try {
nd4jInitThread.join();
|
Used a Runnable rather than a lambda, as the lambda loops forever
|
improbable-research_keanu
|
train
|
java
|
87176648ffd249bb902ef54babdc34afb6c21e87
|
diff --git a/packages/react-atlas-core/src/button/Button.js b/packages/react-atlas-core/src/button/Button.js
index <HASH>..<HASH> 100644
--- a/packages/react-atlas-core/src/button/Button.js
+++ b/packages/react-atlas-core/src/button/Button.js
@@ -59,7 +59,7 @@ const Button = (
const classes = cx(mainStyle, disabledStyle, size);
return (
- <button {...props} className={cx(className)} styleName={classes}>Click</button>
+ <button {...props} className={cx(className)} styleName={classes}>{children}</button>
)
};
|
Fix hardcoded message in button spotted by nogs.
|
DigitalRiver_react-atlas
|
train
|
js
|
39935165d5465880db0b65e0778f6d40c920885d
|
diff --git a/dropbox-v2/server-validators/python_json.babelg.py b/dropbox-v2/server-validators/python_json.babelg.py
index <HASH>..<HASH> 100644
--- a/dropbox-v2/server-validators/python_json.babelg.py
+++ b/dropbox-v2/server-validators/python_json.babelg.py
@@ -346,7 +346,7 @@ class PythonSDKGenerator(CodeGeneratorMonolingual):
self.emit_line('return')
if is_composite_type(field.data_type):
class_name = self.lang.format_class(field.data_type.name)
- if field.data_type.has_coverage():
+ if is_struct_type(field.data_type) and field.data_type.has_coverage():
self.emit_line('if not isinstance(val, {}):'.format(class_name))
else:
self.emit_line('if type(val) is not {}:'.format(class_name))
|
Fixed has_coverage() check so that it's only called on structs.
|
dropbox_stone
|
train
|
py
|
5891715dc5e0374586a2e47a8eb269e3e052a253
|
diff --git a/client.go b/client.go
index <HASH>..<HASH> 100644
--- a/client.go
+++ b/client.go
@@ -578,6 +578,12 @@ func (cli *Client) RedactEvent(roomID, eventID string, req *ReqRedact) (resp *Re
return
}
+// MarkRead marks eventID in roomID as read, signifying the event, and all before it have been read. See https://matrix.org/docs/spec/client_server/r0.6.0#post-matrix-client-r0-rooms-roomid-receipt-receipttype-eventid
+func (cli *Client) MarkRead(roomID, eventID string) error {
+ urlPath := cli.BuildURL("rooms", roomID, "receipt", "m.read", eventID)
+ return cli.MakeRequest("POST", urlPath, nil, nil)
+}
+
// CreateRoom creates a new Matrix room. See https://matrix.org/docs/spec/client_server/r0.2.0.html#post-matrix-client-r0-createroom
// resp, err := cli.CreateRoom(&gomatrix.ReqCreateRoom{
// Preset: "public_chat",
|
Add MarkRead method: marks eventID in roomID as read, signifying that the event and all before it have been read. See <URL>
|
matrix-org_gomatrix
|
train
|
go
|
15b30aa6ca46c4bad731794c47368d16b9ba2afa
|
diff --git a/lib/montrose/options.rb b/lib/montrose/options.rb
index <HASH>..<HASH> 100644
--- a/lib/montrose/options.rb
+++ b/lib/montrose/options.rb
@@ -152,7 +152,7 @@ module Montrose
found = send(key)
return found if found
return args.first if args.length == 1
- raise "Key #{key.inspect} not found" unless block_given?
+ raise KeyError, "Key #{key.inspect} not found" unless block_given?
yield
end
diff --git a/spec/montrose/options_spec.rb b/spec/montrose/options_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/montrose/options_spec.rb
+++ b/spec/montrose/options_spec.rb
@@ -839,11 +839,11 @@ describe Montrose::Options do
end
it "raises for no block given and value not found" do
- _(-> { options.fetch(:every) }).must_raise
+ _(-> { options.fetch(:every) }).must_raise(KeyError)
end
it "raises for more than two args" do
- _(-> { options.fetch(:every, nil, nil) }).must_raise
+ _(-> { options.fetch(:every, nil, nil) }).must_raise(ArgumentError)
end
end
|
Raise KeyError for missing key in Options#fetch
|
rossta_montrose
|
train
|
rb,rb
|
e0b1db80483cefd47a890831e84b58d8b8de2cab
|
diff --git a/src/lib/ConfigurationParser.php b/src/lib/ConfigurationParser.php
index <HASH>..<HASH> 100644
--- a/src/lib/ConfigurationParser.php
+++ b/src/lib/ConfigurationParser.php
@@ -44,7 +44,7 @@ trait ConfigurationParser {
{
return "/Users/{$user}";
}
- elseif (str_contains($system, 'win'))
+ elseif (str_contains($system, 'windows'))
{
return "C:\Users\{$user}";
}
@@ -61,7 +61,7 @@ trait ConfigurationParser {
*/
protected function getSystemUser()
{
- if (str_contains(strtolower(php_uname()), 'win')) return getenv('USERNAME');
+ if (str_contains(strtolower(php_uname()), 'windows')) return getenv('USERNAME');
return posix_getpwuid(posix_geteuid())['name'];
}
|
Change word "win" to "windows"
When comparing against the word "win", the system name "darwin" also matches, so the system user cannot be found
|
laravel_envoy
|
train
|
php
|
d27ee6e6ce180518c64e426c9bf04d37297b1e7a
|
diff --git a/util/src/main/java/org/vesalainen/navi/Navis.java b/util/src/main/java/org/vesalainen/navi/Navis.java
index <HASH>..<HASH> 100644
--- a/util/src/main/java/org/vesalainen/navi/Navis.java
+++ b/util/src/main/java/org/vesalainen/navi/Navis.java
@@ -18,10 +18,7 @@ package org.vesalainen.navi;
import java.util.concurrent.TimeUnit;
import java.util.function.IntToDoubleFunction;
-import java.util.function.Supplier;
-import org.vesalainen.util.function.DoubleBiConsumer;
import org.vesalainen.util.navi.AbstractLocationSupport.LocationFactory;
-import static org.vesalainen.util.navi.Angle.angleDiff;
/**
* Collection of navigational methods etc.
@@ -39,6 +36,15 @@ public final class Navis
@Deprecated public static final double HoursInSeconds = TimeUnit.HOURS.toSeconds(1);
@Deprecated public static final double NMInMetersPerHoursInSecond = NMInMeters / HoursInSeconds;
/**
+ * Converts nautical bearing to cartesian angle.
+ * @param deg
+ * @return
+ */
+ public static double degreesToCartesian(double deg)
+ {
+ return normalizeAngle(360-deg+90);
+ }
+ /**
* Creates center location from coordinates
* @param <T>
* @param factory
|
added degreesToCartesian method
|
tvesalainen_util
|
train
|
java
|
175462491693534dfc8bfa94549f3537611da798
|
diff --git a/mod/quiz/lib.php b/mod/quiz/lib.php
index <HASH>..<HASH> 100644
--- a/mod/quiz/lib.php
+++ b/mod/quiz/lib.php
@@ -2255,8 +2255,8 @@ function quiz_save_multianswer_alternatives
// otherwise the ids of the answers.
if (empty($oldalternativeids)
- or $oldalternatives =
- get_records_list('quiz_answers', 'id', $oldalternativeids))
+ or !($oldalternatives =
+ get_records_list('quiz_answers', 'id', $oldalternativeids)))
{
$oldalternatives = array();
}
@@ -2280,7 +2280,7 @@ function quiz_save_multianswer_alternatives
$alt->answer = $altdata->answer;
$alt->fraction = $altdata->fraction;
$alt->feedback = $altdata->feedback;
- if (! $alt->id = insert_record("quiz_answers", $alt)) {
+ if (!($alt->id = insert_record("quiz_answers", $alt))) {
return false;
}
}
@@ -2319,7 +2319,7 @@ function quiz_save_multianswer_alternatives
delete_records("quiz_answers", "id", $altobsolete->id);
// Possibly obsolute numerical options are also to be deleted:
- delete_records("quiz_numerical", 'answer', $alt->id);
+ delete_records("quiz_numerical", 'answer', $altobsolete->id);
}
// Common alternative options and removal of obsolete options
|
Fixed bug that was pointed out by Tom Cat:
The reuse of old quiz_answers records for question type multianswer/embedded did not work properly.
|
moodle_moodle
|
train
|
php
|
a3a105dcc90b7ac778665027455e25d687b855ab
|
diff --git a/h2o-core/src/main/java/water/init/JarHash.java b/h2o-core/src/main/java/water/init/JarHash.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/main/java/water/init/JarHash.java
+++ b/h2o-core/src/main/java/water/init/JarHash.java
@@ -68,10 +68,14 @@ public abstract class JarHash {
public static InputStream getResource2(String uri) {
try {
// Jar file mode.
- InputStream is = ClassLoader.getSystemClassLoader().getResourceAsStream("resources/www" + uri);
+ InputStream is = null;
+ is = ClassLoader.getSystemClassLoader().getResourceAsStream("resources/www" + uri);
+ if( is != null ) return is;
+ is = ClassLoader.getSystemClassLoader().getResourceAsStream("resources/main/www" + uri);
+ if( is != null ) return is;
+ // This is the right file location of resource inside jar bundled by gradle
+ is = ClassLoader.getSystemClassLoader().getResourceAsStream("www" + uri);
if( is != null ) return is;
- InputStream is2 = ClassLoader.getSystemClassLoader().getResourceAsStream("resources/main/www" + uri);
- if( is2 != null ) return is2;
// That failed, so try all registered locations
for( File f : RESOURCE_FILES ) {
File f2 = new File(f,uri);
|
Proper www-resources resolution respecting gradle assembly structure.
|
h2oai_h2o-3
|
train
|
java
|
474e08bd57862fdef9cff3fcdf3bc3d5d144c6ed
|
diff --git a/tests/BuilderTest.php b/tests/BuilderTest.php
index <HASH>..<HASH> 100644
--- a/tests/BuilderTest.php
+++ b/tests/BuilderTest.php
@@ -46,9 +46,9 @@ final class BuilderTest extends TestCase
*/
public function testGithubIssue365(): void
{
- $data = 'test';
- $size = 300;
- $label = 'label';
+ $data = 'Ahmad';
+ $label = '';
+ $size = 120;
$result = Builder::create()
->data($data)
|
Update vars to correspond with failing case
|
endroid_qr-code
|
train
|
php
|
a2dd66aac10584bf505bbb39b70cb838172f82d5
|
diff --git a/spec/integration/indexing_spec.rb b/spec/integration/indexing_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/integration/indexing_spec.rb
+++ b/spec/integration/indexing_spec.rb
@@ -24,7 +24,7 @@ describe 'indexing' do
let(:db_name) { Perpetuity.configuration.data_source.db }
before do
- Perpetuity.data_source :mongodb, db_name
+ load './spec/spec_helper.rb'
mapper.data_source.drop_collection Object
end
after { mapper.data_source.drop_collection Object }
|
Defer to spec_helper to configure DB for indexes
|
jgaskins_perpetuity
|
train
|
rb
|
f87f3cd512820553b7195ac81be1a11d66e33c5c
|
diff --git a/src/Connections/DBALConnection.php b/src/Connections/DBALConnection.php
index <HASH>..<HASH> 100644
--- a/src/Connections/DBALConnection.php
+++ b/src/Connections/DBALConnection.php
@@ -38,4 +38,10 @@ class DBALConnection implements Connection
$schema = $this->connection->getSchemaManager();
return $schema->listTableDetails($table)->getPrimaryKeyColumns()[0];
}
+
+ public function registerPlatformType(string $platformType, string $dbalType)
+ {
+ $schema = $this->connection->getSchemaManager();
+ $schema->getDatabasePlatform()->registerDoctrineTypeMapping($platformType, $dbalType);
+ }
}
|
Add ability to configure a platform-specific type.
|
ComPHPPuebla_dbal-fixtures
|
train
|
php
|
f0bc90a805ddf7ec2dd6e641cee9e1543244cbfb
|
diff --git a/src/Generator/Code/Chunks.php b/src/Generator/Code/Chunks.php
index <HASH>..<HASH> 100644
--- a/src/Generator/Code/Chunks.php
+++ b/src/Generator/Code/Chunks.php
@@ -57,6 +57,16 @@ class Chunks
}
/**
+ * @param Chunks $chunks
+ */
+ public function merge(Chunks $chunks)
+ {
+ foreach ($chunks->getChunks() as $identifier => $code) {
+ $this->append($identifier, $code);
+ }
+ }
+
+ /**
* @return string|null
*/
public function getNamespace(): ?string
@@ -73,6 +83,23 @@ class Chunks
}
/**
+ * Writes this chunk collection as zip archive to the provided file
+ *
+ * @param string $file
+ */
+ public function writeTo(string $file)
+ {
+ $zip = new \ZipArchive();
+ $zip->open($file, \ZipArchive::CREATE);
+
+ foreach ($this->getChunks() as $identifier => $code) {
+ $zip->addFromString($identifier, $code);
+ }
+
+ $zip->close();
+ }
+
+ /**
* @return string
*/
public function __toString()
|
add merge method and write to zip archive
|
apioo_psx-schema
|
train
|
php
|
5aa879135732e4546f1093237795a558749cb811
|
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -710,8 +710,9 @@ UFDS.prototype._getUser = function _getUser(filter, msg, account, cb, noCache) {
};
self.search(inputDN, opts, function (gErr, gRes) {
var field = gRes ? gRes[0].uniquemember : null;
- if (Array.isArray(field) &&
- field.indexOf(compareDN) !== -1) {
+ if ((Array.isArray(field) &&
+ field.indexOf(compareDN) !== -1) ||
+ (field === compareDN)) {
result.memberof.push(inputDN);
}
subCB();
|
CAPI-<I>: Fix edge case in groups queries
The user.memberof field would be populated improperly if either of the
queried groups had only one member.
|
joyent_node-ufds
|
train
|
js
|
6d5e32bca8394b6bc15431b88666cc3181d99fc7
|
diff --git a/js/models/threads.js b/js/models/threads.js
index <HASH>..<HASH> 100644
--- a/js/models/threads.js
+++ b/js/models/threads.js
@@ -40,9 +40,11 @@ var Whisper = Whisper || {};
},
messages: function() {
- var messages = new Whisper.MessageCollection([], {threadId: this.id});
- messages.fetch();
- return messages;
+ if (!this.messageCollection) {
+ this.messageCollection = new Whisper.MessageCollection([], {threadId: this.id});
+ }
+ this.messageCollection.fetch();
+ return this.messageCollection;
},
});
|
Don't make a new collection on every call to thread.messages()
|
ForstaLabs_librelay-node
|
train
|
js
|
fd2275316ab5e5da8145bfcaba9dce28690cf838
|
diff --git a/src/frontend/org/voltdb/TheHashinator.java b/src/frontend/org/voltdb/TheHashinator.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/TheHashinator.java
+++ b/src/frontend/org/voltdb/TheHashinator.java
@@ -93,7 +93,7 @@ public abstract class TheHashinator {
*/
public static TheHashinator getHashinator(Class<? extends TheHashinator> hashinatorImplementation,
byte config[]) {
- return constructHashinator(hashinatorImplementation, config);
+ return constructHashinator(hashinatorImplementation, config, false);
}
/**
|
Update non-static hashinator to use new cooked.
|
VoltDB_voltdb
|
train
|
java
|
b92d529e775dc925577fc481d148ec62c13ab1d4
|
diff --git a/azure-maven-plugin-lib/src/main/java/com/microsoft/azure/maven/utils/MavenAuthUtils.java b/azure-maven-plugin-lib/src/main/java/com/microsoft/azure/maven/utils/MavenAuthUtils.java
index <HASH>..<HASH> 100644
--- a/azure-maven-plugin-lib/src/main/java/com/microsoft/azure/maven/utils/MavenAuthUtils.java
+++ b/azure-maven-plugin-lib/src/main/java/com/microsoft/azure/maven/utils/MavenAuthUtils.java
@@ -80,6 +80,7 @@ public class MavenAuthUtils {
putPropertyIfNotExist("org.slf4j.simpleLogger.log.com.microsoft.aad.adal4j", "off");
putPropertyIfNotExist("org.slf4j.simpleLogger.log.com.azure.core.credential", "off");
putPropertyIfNotExist("org.slf4j.simpleLogger.log.com.microsoft.aad.msal4jextensions", "off");
+ putPropertyIfNotExist("org.slf4j.simpleLogger.log.com.azure.core.implementation", "warn");
}
private static void putPropertyIfNotExist(String key, String value) {
|
Disable the annoying log "Acquired a new access token" in AccessTokenCache during login
|
Microsoft_azure-maven-plugins
|
train
|
java
|
83935aabc4cff6220a71ccd6594db31a771d1759
|
diff --git a/app/openbel/api/helpers/translators.rb b/app/openbel/api/helpers/translators.rb
index <HASH>..<HASH> 100644
--- a/app/openbel/api/helpers/translators.rb
+++ b/app/openbel/api/helpers/translators.rb
@@ -7,17 +7,19 @@ module OpenBEL
# type.
module Translators
- # Patch {::Sinatra::Helpers::Stream} to respect the +puts+ and +write+
- # method. This is necessary because the RDF.rb writers will call theseon
- # the IO object (in this case {::Sinatra::Helpers::Stream}).
+ # Open {::Sinatra::Helpers::Stream} and add the +puts+, +write+, and
+ # +flush+ methods. This is necessary because the RDF.rb writers will call
+ # these methods on the IO (in this case {::Sinatra::Helpers::Stream}).
class ::Sinatra::Helpers::Stream
+ # Write each string in +args*, new-line delimited, to the stream.
def puts(*args)
self << (
args.map { |string| "#{string.encode(Encoding::UTF_8)}\n" }.join
)
end
+ # Write the string to the stream.
def write(string)
self << string.encode(Encoding::UTF_8)
end
|
added doc for opening ::Sinatra::Helpers::Stream
It is important to convey why methods were added to this class. The
methods are a convenience so RDF.rb's writers can expect to call them.
|
OpenBEL_openbel-api
|
train
|
rb
|
81d5fc8e501981363803e5180cdb408a34d29a72
|
diff --git a/util/server.js b/util/server.js
index <HASH>..<HASH> 100644
--- a/util/server.js
+++ b/util/server.js
@@ -4,6 +4,7 @@
var browserify = require('browserify');
var sass = require('node-sass');
var fs = require('fs');
+var path = require('path');
var each = require('lodash/each');
var isString = require('lodash/isString');
var Configurator = require('./Configurator');
@@ -78,8 +79,10 @@ server.serveStyles = function(expressApp, route, props) {
var ConfiguratorClass = props.ConfiguratorClass || Configurator;
var configurator = new ConfiguratorClass(config);
var scssFiles = configurator.getStyles();
+ var cwd = process.cwd();
var scssContent = scssFiles.map(function(scssFile) {
- return "@import '"+scssFile+"';";
+ var relPath = String(path.relative(cwd, scssFile)).split(path.sep).join('/');
+ return "@import '"+relPath+"';";
}).join('\n');
sassOptions.data = scssContent;
} else {
|
Use '/' paths in generated sass.
|
substance_substance
|
train
|
js
|
474c8df63d80cb79db6281dc0137f29103a6985c
|
diff --git a/google/oauth2/flow.py b/google/oauth2/flow.py
index <HASH>..<HASH> 100644
--- a/google/oauth2/flow.py
+++ b/google/oauth2/flow.py
@@ -14,6 +14,10 @@
"""OAuth 2.0 Authorization Flow
+.. warning::
+ This module is experimental and is subject to change signficantly
+ within major version releases.
+
This module provides integration with `requests-oauthlib`_ for running the
`OAuth 2.0 Authorization Flow`_ and acquiring user credentials.
|
Add warning about oauth2.flow (#<I>)
|
googleapis_google-auth-library-python
|
train
|
py
|
bed66d2b7647c4a5a8213a1ee19c108ff3f13781
|
diff --git a/ajaxSelectPicker.js b/ajaxSelectPicker.js
index <HASH>..<HASH> 100644
--- a/ajaxSelectPicker.js
+++ b/ajaxSelectPicker.js
@@ -30,7 +30,8 @@
ajaxOptions: {}, //if you want to change the dataType, data, or request type set it here. default [json, {q: searchBoxVal}, POST],
placeHolderOption: null, // string with text to show
debug: false, //If you want console output, set this to true
- mixWithCurrents: false // If you want to mix results with currently selected results to avoid losing them
+ mixWithCurrents: false, // If you want to mix results with currently selected results to avoid losing them
+ loadingTemplate: '<div class="menu-loading">Loading...</div>' // If you need to override the template used for when the loading text is shown.
};
var plugin = this,
@@ -86,7 +87,7 @@
plugin.destroyLi();
//show loading message
- plugin.$menu.append('<div class="menu-loading">loading...</div>');
+ plugin.$menu.append(plugin.ajaxOptions.loadingTemplate);
var ajaxParams = {};
ajaxParams.url = plugin.ajaxOptions.ajaxSearchUrl;
|
Make the loading element into a template so it can be overridden.
|
truckingsim_Ajax-Bootstrap-Select
|
train
|
js
|
7baf1e549ea2c08efb99216e3b5a9ef673aed90a
|
diff --git a/aws/resource_aws_ses_identity_notification_topic_test.go b/aws/resource_aws_ses_identity_notification_topic_test.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_ses_identity_notification_topic_test.go
+++ b/aws/resource_aws_ses_identity_notification_topic_test.go
@@ -25,6 +25,7 @@ func TestAccAwsSESIdentityNotificationTopic_basic(t *testing.T) {
testAccPreCheck(t)
testAccPreCheckAWSSES(t)
},
+ ErrorCheck: testAccErrorCheck(t, ses.EndpointsID),
Providers: testAccProviders,
CheckDestroy: testAccCheckAwsSESIdentityNotificationTopicDestroy,
Steps: []resource.TestStep{
|
tests/r/ses_identity_notification_topic: Add ErrorCheck
|
terraform-providers_terraform-provider-aws
|
train
|
go
|
4c6512696518edc06e8f8338e5868a0ac1320c34
|
diff --git a/app/models/message.rb b/app/models/message.rb
index <HASH>..<HASH> 100644
--- a/app/models/message.rb
+++ b/app/models/message.rb
@@ -134,7 +134,7 @@ class Message
return conditions if conditions_only
- conditions.default_scope.limit(LIMIT).paginate(:page => page)
+ conditions.default_scope.paginate(:page => page, :per_page => LIMIT)
end
def self.extract_additional_from_quickfilter(filters)
|
- fixes bug where pagination would not work when a quickfilter is used (as described in <URL>)
|
Graylog2_graylog2-server
|
train
|
rb
|
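The fix above swaps a bare `limit` for `paginate(:page => page, :per_page => LIMIT)`, so the cap is applied per page instead of truncating the result set before pagination. A toy Python illustration of page/per_page slicing (all names invented for the example):

LIMIT = 100  # hypothetical page size

def paginate(items, page, per_page=LIMIT):
    # 1-based page number, like the Rails-style API in the record above
    start = (page - 1) * per_page
    return items[start:start + per_page]

messages = list(range(350))
assert paginate(messages, 1)[0] == 0
assert paginate(messages, 4) == list(range(300, 350))  # last, partial page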
ac604f25a148afe05bc134327704731140442742
|
diff --git a/cmd/minikube/cmd/start_flags.go b/cmd/minikube/cmd/start_flags.go
index <HASH>..<HASH> 100644
--- a/cmd/minikube/cmd/start_flags.go
+++ b/cmd/minikube/cmd/start_flags.go
@@ -144,7 +144,7 @@ func initMinikubeFlags() {
startCmd.Flags().Bool(preload, true, "If set, download tarball of preloaded images if available to improve start time. Defaults to true.")
startCmd.Flags().Bool(deleteOnFailure, false, "If set, delete the current cluster if start fails and try again. Defaults to false.")
startCmd.Flags().Bool(forceSystemd, false, "If set, force the container runtime to use sytemd as cgroup manager. Currently available for docker and crio. Defaults to false.")
- startCmd.Flags().String(startOutput, "text", "Format to print stdout in. Options include: [text,json]")
+ startCmd.Flags().StringP(startOutput, "o", "text", "Format to print stdout in. Options include: [text,json]")
}
// initKubernetesFlags inits the commandline flags for Kubernetes related options
|
feature: added shorthand for start command output option
|
kubernetes_minikube
|
train
|
go
|
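The minikube change upgrades cobra's `String` to `StringP` so `--output` also answers to `-o`. Python's argparse expresses the same pairing by registering both flags on one argument; this is only an analogy, not part of the minikube code:

import argparse

parser = argparse.ArgumentParser(prog="start")
parser.add_argument(
    "-o", "--output",
    default="text",
    choices=["text", "json"],
    help="Format to print stdout in.",
)
assert parser.parse_args(["-o", "json"]).output == "json"
assert parser.parse_args([]).output == "text"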
b20ffcf048901250f01eb58ae09a1565776906d1
|
diff --git a/chatterbot/storage/storage_adapter.py b/chatterbot/storage/storage_adapter.py
index <HASH>..<HASH> 100644
--- a/chatterbot/storage/storage_adapter.py
+++ b/chatterbot/storage/storage_adapter.py
@@ -24,6 +24,11 @@ class StorageAdapter(object):
# The string must be lowercase
model_name = model_name.lower()
+ kwarg_model_key = '%s_model' % (model_name, )
+
+ if kwarg_model_key in self.kwargs:
+ return self.kwargs.get(kwarg_model_key)
+
get_model_method = getattr(self, 'get_%s_model' % (model_name, ))
return get_model_method()
|
Allow model classes to be overridden
|
gunthercox_ChatterBot
|
train
|
py
|
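The ChatterBot patch lets callers inject a model class by passing a `<name>_model` entry in the adapter's kwargs, falling back to the `get_<name>_model` method otherwise. A stripped-down sketch of that lookup order (class name and return values are placeholders):

class StorageAdapter:
    def __init__(self, **kwargs):
        self.kwargs = kwargs

    def get_statement_model(self):
        return "DefaultStatementModel"  # stand-in for the real model class

    def get_model(self, model_name):
        model_name = model_name.lower()
        kwarg_model_key = "%s_model" % model_name
        # an explicitly supplied model wins over the adapter default
        if kwarg_model_key in self.kwargs:
            return self.kwargs[kwarg_model_key]
        return getattr(self, "get_%s_model" % model_name)()

assert StorageAdapter().get_model("Statement") == "DefaultStatementModel"
assert StorageAdapter(statement_model="Custom").get_model("Statement") == "Custom"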
8fa27d57bf7fab6baf0926d513062fe9c213e881
|
diff --git a/host-controller/src/main/java/org/jboss/as/host/controller/ServerInventoryImpl.java b/host-controller/src/main/java/org/jboss/as/host/controller/ServerInventoryImpl.java
index <HASH>..<HASH> 100644
--- a/host-controller/src/main/java/org/jboss/as/host/controller/ServerInventoryImpl.java
+++ b/host-controller/src/main/java/org/jboss/as/host/controller/ServerInventoryImpl.java
@@ -198,6 +198,22 @@ public class ServerInventoryImpl implements ServerInventory {
public ServerStatus restartServer(String serverName, final int gracefulTimeout, final ModelNode domainModel) {
stopServer(serverName, gracefulTimeout);
+ ServerStatus status;
+ // FIXME total hack; set up some sort of notification scheme
+ for (int i = 0; i < 50; i++) {
+ status = determineServerStatus(serverName);
+ if (status == ServerStatus.STOPPING) {
+ try {
+ Thread.sleep(100);
+ } catch (final InterruptedException e) {
+ Thread.currentThread().interrupt();
+ break;
+ }
+ }
+ else {
+ break;
+ }
+ }
return startServer(serverName, domainModel);
}
|
Ugly fix for domain managed server restart handling
was: 3e1a6d<I>f<I>b6ffce3f7f4bf<I>a4c<I>
|
wildfly_wildfly-core
|
train
|
java
|
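The WildFly workaround polls the server status up to 50 times, sleeping 100 ms between checks, so the subsequent start does not race the stop. The same poll-and-sleep shape in Python, with the status source left as a hypothetical callable:

import time

def wait_until_stopped(get_status, attempts=50, delay=0.1):
    # keep checking while the server still reports STOPPING
    for _ in range(attempts):
        if get_status() != "STOPPING":
            break
        time.sleep(delay)

states = iter(["STOPPING", "STOPPING", "STOPPED"])  # toy status sequence
wait_until_stopped(lambda: next(states), delay=0)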
a8b24debb840d2574748d84ac7cc25aa6572603b
|
diff --git a/elasticsearch/client/indices.py b/elasticsearch/client/indices.py
index <HASH>..<HASH> 100644
--- a/elasticsearch/client/indices.py
+++ b/elasticsearch/client/indices.py
@@ -766,3 +766,24 @@ class IndicesClient(NamespacedClient):
params=params)
return data
+ @query_params('active_only', 'detailed', 'human')
+ def recovery(self, index=None, params=None):
+ """
+ The indices recovery API provides insight into on-going shard
+ recoveries. Recovery status may be reported for specific indices, or
+ cluster-wide.
+ `<http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/indices-recovery.html>`_
+
+ :arg index: A comma-separated list of index names; use `_all` or empty
+ string to perform the operation on all indices
+ :arg active_only: Display only those recoveries that are currently on-
+ going (default: 'false')
+ :arg detailed: Whether to display detailed information about shard
+ recovery (default: 'false')
+ :arg human: Whether to return time and byte values in human-readable
+ format. (default: 'false')
+
+ """
+ _, data = self.transport.perform_request('GET', _make_path(index,
+ '_recovery'), params=params)
+ return data
|
Added indices.recovery api
|
elastic_elasticsearch-py
|
train
|
py
|
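A plausible call site for the new method, assuming a reachable cluster on localhost and an index named `my-index` (both assumptions made up for the example); the `query_params` decorator is what exposes `active_only`, `detailed` and `human` as keyword arguments:

from elasticsearch import Elasticsearch

es = Elasticsearch()  # assumes a node on localhost:9200

# cluster-wide recovery status
print(es.indices.recovery())

# only on-going recoveries for one index, with human-readable values
print(es.indices.recovery(index="my-index", active_only=True, human=True))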
dd07a9b0cd45e0e4013e715179a67e30f3e478e4
|
diff --git a/ssdeep.js b/ssdeep.js
index <HASH>..<HASH> 100644
--- a/ssdeep.js
+++ b/ssdeep.js
@@ -178,17 +178,19 @@
rh.update(thisByte);
- if (i === (len - 1) || rh.sum() % triggerValue === (triggerValue - 1)) {
+ if (signatures[0].length < 63 && rh.sum() % triggerValue === (triggerValue - 1)) {
signatures[0] += B64.charAt(h1&63);
signatures[2] = triggerValue;
h1 = HASH_INIT;
}
- if (i === (len - 1) || rh.sum() % (triggerValue * 2) === (triggerValue * 2 - 1) ) {
+ if (signatures[1].length < 31 && rh.sum() % (triggerValue * 2) === (triggerValue * 2 - 1) ) {
signatures[1] += B64.charAt(h2&63);
signatures[2] = triggerValue;
h2 = HASH_INIT;
}
}
+ signatures[0] += B64.charAt(h1&63);
+ signatures[1] += B64.charAt(h2&63);
return signatures;
}
|
Fixed #2
- Limited individual hash length to <I> and (<I>/2 - 1) characters
> Based on <URL>
|
cloudtracer_ssdeep.js
|
train
|
js
|
1bb4d0faf6650264300594f697c4a9896aade3f4
|
diff --git a/JSAT/src/jsat/io/JSATData.java b/JSAT/src/jsat/io/JSATData.java
index <HASH>..<HASH> 100644
--- a/JSAT/src/jsat/io/JSATData.java
+++ b/JSAT/src/jsat/io/JSATData.java
@@ -799,6 +799,7 @@ public class JSATData
}
for(int i = 0; i < weights.size(); i++)
toRet.setWeight(i, weights.getD(i));
+ store.finishAdding();
return toRet;
}
|
bug fix forgot to call finishAdding!
|
EdwardRaff_JSAT
|
train
|
java
|
24fa587c2d2358fca60e9e95cfc165212864c0c6
|
diff --git a/twisted/plugins/startnode.py b/twisted/plugins/startnode.py
index <HASH>..<HASH> 100644
--- a/twisted/plugins/startnode.py
+++ b/twisted/plugins/startnode.py
@@ -3,14 +3,8 @@ from __future__ import print_function
import sys
from twisted.application.service import IServiceMaker, MultiService
-from twisted.application.internet import TCPServer
-from twisted.cred.portal import Portal
-from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.plugin import IPlugin
from twisted.python import usage
-from twisted.conch.manhole import ColoredManhole
-from twisted.conch.manhole_ssh import ConchFactory, TerminalRealm
-from twisted.conch.insults.insults import ServerProtocol
from zope.interface import implements
@@ -153,6 +147,13 @@ class ActorRunnerMaker(object):
return m
def make_manhole_server(self, port, username, password):
+ from twisted.application.internet import TCPServer
+ from twisted.cred.portal import Portal
+ from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
+ from twisted.conch.manhole import ColoredManhole
+ from twisted.conch.manhole_ssh import ConchFactory, TerminalRealm
+ from twisted.conch.insults.insults import ServerProtocol
+
rlm = TerminalRealm()
rlm.chainedProtocolFactory = lambda: ServerProtocol(ColoredManhole, None)
|
Moved imports that require pycrypto/pyasn1 in startnode to the code that needs it, so that pycrypto and pyasn1 would be optional
|
eallik_spinoff
|
train
|
py
|
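The spinoff commit defers the conch/cred imports into `make_manhole_server` so pycrypto and pyasn1 are only required when a manhole is actually requested. The same technique in a generic sketch, with openpyxl standing in for any heavy optional dependency:

def export_report(rows, path):
    # imported lazily: callers that never export don't need the package installed
    try:
        import openpyxl
    except ImportError as exc:
        raise RuntimeError("export_report requires the optional 'openpyxl' package") from exc

    workbook = openpyxl.Workbook()
    sheet = workbook.active
    for row in rows:
        sheet.append(list(row))
    workbook.save(path)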
35a584234b00297dc511300bb6e42eeaceac8345
|
diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -417,12 +417,12 @@ class Server extends KarmaEventEmitter {
}
processWrapper.on('unhandledRejection', (error) => {
- this.log.error(`UnhandledRejection: ${error.message || String(error)}`)
+ this.log.error(`UnhandledRejection: ${error.stack || error.message || String(error)}`)
reportError(error)
})
processWrapper.on('uncaughtException', (error) => {
- this.log.error(`UncaughtException:: ${error.message || String(error)}`)
+ this.log.error(`UncaughtException: ${error.stack || error.message || String(error)}`)
reportError(error)
})
}
|
feat(server): print stack of unhandledrejections (#<I>)
* feat(server): print stack of unhandledrejections
The v8 engine includes the error with the stack and most of the time we
need the stack also.
|
karma-runner_karma
|
train
|
js
|
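The Karma patch logs `error.stack` when it is available instead of only the message. Python's logging module reaches a similar outcome with `logger.exception`, which attaches the traceback to the record; a minimal demonstration:

import logging

logging.basicConfig(level=logging.ERROR)
log = logging.getLogger("server")

try:
    raise ValueError("boom")
except ValueError:
    # logs at ERROR level and appends the full traceback, roughly error.stack
    log.exception("UncaughtException")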
8cf10aff9713656aa3d99e5d7c73acf1f89b0158
|
diff --git a/test/pulsar.js b/test/pulsar.js
index <HASH>..<HASH> 100644
--- a/test/pulsar.js
+++ b/test/pulsar.js
@@ -68,6 +68,20 @@ describe('tests of pulsar API', function() {
});
});
+ it('check if task can be got after it is created', function(done) {
+ var task = this.pulsar.createTask({
+ app: taskArgs.app.example,
+ env: taskArgs.env.production,
+ action: taskArgs.action.dummySleepy
+ });
+
+ var pulsar = new Pulsar(this.pulsarDb, {'repo': 'test/data/pulsar-conf-dummy/'});
+ pulsar.getTask(task.id, function(err, result) {
+ assert.deepEqual(result.getData(), task.getData());
+ done();
+ });
+ });
+
it('check if created task in the list of current tasks of pulsar', function() {
var task = this.pulsar.createTask({
app: taskArgs.app.example,
|
Add test which shows bug in PulsarExec constructor.
|
cargomedia_pulsar-rest-api
|
train
|
js
|
8ed5837f2fa7a2a4606d7bca108a3550bf08e0ec
|
diff --git a/lib/arg-parser/dsl.rb b/lib/arg-parser/dsl.rb
index <HASH>..<HASH> 100644
--- a/lib/arg-parser/dsl.rb
+++ b/lib/arg-parser/dsl.rb
@@ -41,6 +41,11 @@ module ArgParser
@args_def ||= ArgParser::Definition.new
end
+ # Returns true if any arguments have been defined
+ def args_defined?
+ @args_def && @args_def.args.size > 0
+ end
+
# Sets the title that will appear in the Usage output generated from
# the Definition.
def title(val)
|
Add #args_defined? helper to determine if any arguments were added via DSL
|
agardiner_arg-parser
|
train
|
rb
|
21dd12c1574f56132ac093fd26d4f336f86c067c
|
diff --git a/src/Imper86/Core/DateTime.php b/src/Imper86/Core/DateTime.php
index <HASH>..<HASH> 100644
--- a/src/Imper86/Core/DateTime.php
+++ b/src/Imper86/Core/DateTime.php
@@ -28,6 +28,11 @@ class DateTime extends \DateTime
parent::__construct($time, $timezone);
}
+ public function __toString()
+ {
+ return $this->format();
+ }
+
public function formatPL(string $format = 'Y-m-d H:i:s')
{
$oldFormat = $format;
|
magic method __toString added
|
imper86_core
|
train
|
php
|
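PHP's `__toString` maps directly onto Python's `__str__`. A comparable wrapper around `datetime`, with the default format chosen arbitrarily for the example:

from datetime import datetime

class StampedDateTime(datetime):
    def __str__(self):
        return self.format()

    def format(self, fmt="%Y-%m-%d %H:%M:%S"):
        return self.strftime(fmt)

print(str(StampedDateTime(2024, 1, 31, 12, 0, 0)))  # 2024-01-31 12:00:00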
1c42da5bef9a60898f2827dac7150093ad29e8d2
|
diff --git a/cgo.go b/cgo.go
index <HASH>..<HASH> 100644
--- a/cgo.go
+++ b/cgo.go
@@ -278,9 +278,6 @@ func rungcc3(ctx *Context, dir string, ofile string, ofiles []string) error {
return nil
}
args = append(args, libgcc)
- } else {
- // explicitly disable build-id when using clang
- args = append(args, "-Wl,--build-id=none")
}
t0 := time.Now()
err := run(dir, nil, gcc(), args...)
|
remove clang specific build-id hack, nobody cares
|
constabulary_gb
|
train
|
go
|
7fb78508bb5052dcf7b99e97e71ec311316824b0
|
diff --git a/sunspot/lib/sunspot/schema.rb b/sunspot/lib/sunspot/schema.rb
index <HASH>..<HASH> 100644
--- a/sunspot/lib/sunspot/schema.rb
+++ b/sunspot/lib/sunspot/schema.rb
@@ -23,7 +23,8 @@ module Sunspot
FieldType.new('slong', 'SortableLong', 'l'),
FieldType.new('tint', 'TrieInteger', 'it'),
FieldType.new('tfloat', 'TrieFloat', 'ft'),
- FieldType.new('tdate', 'TrieInt', 'dt')
+ FieldType.new('tdate', 'TrieInt', 'dt'),
+ FieldType.new('daterange', 'DateRange', 'dr')
]
|
Add daterange to sunspot/schema
|
sunspot_sunspot
|
train
|
rb
|
93a6bc91e7fa1487d15386425212f544e0fad737
|
diff --git a/psyplot/data.py b/psyplot/data.py
index <HASH>..<HASH> 100755
--- a/psyplot/data.py
+++ b/psyplot/data.py
@@ -917,6 +917,7 @@ class CFDecoder(object):
if not coord_names:
return
ret = []
+ matched = []
for coord in map(lambda dim: coords[dim], filter(
lambda dim: dim in coords, chain(
coord_names, var.dims))):
@@ -924,12 +925,18 @@ class CFDecoder(object):
# list of possible coordinate names
if coord.name not in (c.name for c in ret):
if coord.name in getattr(self, axis):
- ret.clear()
- ret.append(coord)
- break
+ matched.append(coord)
elif coord.attrs.get('axis', '').lower() == axis:
ret.append(coord)
- if ret:
+ if matched:
+ if len(matched) > 1:
+ warn("Found multiple matches for %s coordinate in the "
+ "coordinates: %s. I use %s" % (
+ axis, ', '.join([c.name for c in matched]),
+ matched[0].name),
+ PsyPlotRuntimeWarning)
+ return matched[0]
+ elif ret:
return None if len(ret) > 1 else ret[0]
# If the coordinates attribute is specified but the coordinate
# variables themselves have no 'axis' attribute, we interpret the
|
warn when dimension list matches multiples
This commit treats matches with dimension lists
(such as CFDecoder.x) different than the
coordinates or axis attribute.
|
Chilipp_psyplot
|
train
|
py
|
4edc080750cc715585a5570b6a0d4da7a13bc3c7
|
diff --git a/src/cordova/plugin/add.js b/src/cordova/plugin/add.js
index <HASH>..<HASH> 100644
--- a/src/cordova/plugin/add.js
+++ b/src/cordova/plugin/add.js
@@ -87,7 +87,9 @@ function add (projectRoot, hooksRunner, opts) {
link: opts.link,
pluginInfoProvider: pluginInfoProvider,
variables: opts.cli_variables,
- is_top_level: true
+ is_top_level: true,
+ save_exact: opts['save-exact'] || false,
+ production: opts.production
};
return module.exports.determinePluginTarget(projectRoot, cfg, target, fetchOptions).then(function (resolvedTarget) {
@@ -121,7 +123,9 @@ function add (projectRoot, hooksRunner, opts) {
// files platform_www directory, so they'll be applied to www on each prepare.
usePlatformWww: true,
nohooks: opts.nohooks,
- force: opts.force
+ force: opts.force,
+ save_exact: opts['save-exact'] || false,
+ production: opts.production
};
events.emit('verbose', 'Calling plugman.install on plugin "' + pluginInfo.dir + '" for platform "' + platform);
|
CB-<I> : added save_exact and production opts
This closes #<I>
|
apache_cordova-lib
|
train
|
js
|
8aa9bb6375ffabe20724698158c87e4dd80a5fff
|
diff --git a/client/webpack.config.js b/client/webpack.config.js
index <HASH>..<HASH> 100644
--- a/client/webpack.config.js
+++ b/client/webpack.config.js
@@ -211,10 +211,19 @@ const webpackConfig = {
config: false,
plugins: [ autoprefixerPlugin() ],
},
- prelude: `@use '${ path.join(
- __dirname,
- 'assets/stylesheets/shared/_utils.scss'
- ) }' as *;`,
+ // Since `prelude` string will be appended to each Sass file
+ // We need to ensure that the import path (inside a sass file) is a posix path, regardless of the OS/platform
+ // Final result should be something like `@use 'client/assets/stylesheets/shared/_utils.scss' as *;`
+ prelude: `@use '${
+ path
+ // Path, relative to Node CWD
+ .relative(
+ process.cwd(),
+ path.join( __dirname, 'assets/stylesheets/shared/_utils.scss' )
+ )
+ .split( path.sep ) // Break any path (posix/win32) by path separator
+ .join( path.posix.sep ) // Convert the path explicitly to posix to ensure imports work fine
+ }' as *;`,
} ),
{
include: path.join( __dirname, 'sections.js' ),
|
Fix Sass build on Windows (#<I>)
|
Automattic_wp-calypso
|
train
|
js
|
09fde57335ad2f0262fee8dd5ec976232acfbbe1
|
diff --git a/FCMClient.php b/FCMClient.php
index <HASH>..<HASH> 100644
--- a/FCMClient.php
+++ b/FCMClient.php
@@ -80,9 +80,41 @@ class FCMClient
}
/**
+ * Subscribe devices to a Topic
+ *
+ * @param null $topicId
+ * @param array $deviceTokens
+ * @return \Psr\Http\Message\ResponseInterface
+ */
+ public function subscribeDevicesToTopic($topicId = null, $deviceTokens = array())
+ {
+ if(!$topicId || empty($deviceTokens)){
+ throw new \InvalidArgumentException("Please check arguments!");
+ }
+
+ return $this->client->addTopicSubscription($topicId, $deviceTokens);
+ }
+
+ /**
+ * Remove devices from a Topic
+ *
+ * @param null $topicId
+ * @param array $deviceTokens
+ * @return \Psr\Http\Message\ResponseInterface
+ */
+ public function removeDevicesFromTopic($topicId = null, $deviceTokens = array())
+ {
+ if(!$topicId || empty($deviceTokens)){
+ throw new \InvalidArgumentException("Please check arguments!");
+ }
+
+ return $this->client->removeTopicSubscription($topicId, $deviceTokens);
+ }
+
+ /**
* @param DeviceNotification | TopicNotification $notification
*
- * @return Client
+ * @return \Psr\Http\Message\ResponseInterface
*/
public function sendNotification($notification)
{
|
Add "ubscribe and unsubscribe devices in a Topic" feature
|
redjanym_FCMBundle
|
train
|
php
|
15ca8397abc31652aa1c8f44117412a60632b577
|
diff --git a/lib/roo/csv.rb b/lib/roo/csv.rb
index <HASH>..<HASH> 100644
--- a/lib/roo/csv.rb
+++ b/lib/roo/csv.rb
@@ -1,5 +1,6 @@
require 'rubygems'
require 'csv'
+require 'time'
# The Csv class can read csv files (must be separated with commas) which then
# can be handled like spreadsheets. This means you can access cells like A5
diff --git a/lib/roo/generic_spreadsheet.rb b/lib/roo/generic_spreadsheet.rb
index <HASH>..<HASH> 100644
--- a/lib/roo/generic_spreadsheet.rb
+++ b/lib/roo/generic_spreadsheet.rb
@@ -1,6 +1,7 @@
# encoding: utf-8
require 'tmpdir'
+require 'stringio'
# Base class for all other types of spreadsheets
class Roo::GenericSpreadsheet
diff --git a/test/test_helper.rb b/test/test_helper.rb
index <HASH>..<HASH> 100644
--- a/test/test_helper.rb
+++ b/test/test_helper.rb
@@ -5,6 +5,7 @@ require 'test/unit'
require 'fileutils'
require 'timeout'
require 'logger'
+require 'date'
# require gem files
require File.dirname(__FILE__) + '/../lib/roo'
|
add a couple requires to get tests running under <I>
|
roo-rb_roo
|
train
|
rb,rb,rb
|
197bf0b07c5cbad211b387b9599c94e8a2650f29
|
diff --git a/spec/support/vcr.rb b/spec/support/vcr.rb
index <HASH>..<HASH> 100644
--- a/spec/support/vcr.rb
+++ b/spec/support/vcr.rb
@@ -3,6 +3,9 @@ require 'vcr'
VCR.configure do |config|
config.cassette_library_dir = "spec/fixtures/vcr_cassettes"
config.hook_into :webmock # or :fakeweb
+ config.default_cassette_options = {
+ :match_requests_on => [:method, :uri, :body_as_json]
+ }
end
RSpec.configure do |c|
|
Break everything!
Many cassettes were using outdated requests. These need to be updated,
it's better to have broken tests than those which just seem to be
working but have wrong fixtures.
|
Jesus_dropbox_api
|
train
|
rb
|
50eea4c0ecace3e2105520318ea12c523809c693
|
diff --git a/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/restapi/RestApiConnector.java b/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/restapi/RestApiConnector.java
index <HASH>..<HASH> 100644
--- a/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/restapi/RestApiConnector.java
+++ b/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/restapi/RestApiConnector.java
@@ -167,7 +167,7 @@ public abstract class RestApiConnector {
}
if (status.getStatusCode() >= 400) {
- log.info("Unable to get response using: {} got status code {}", url, status.getStatusCode());
+ log.info("Unable to get response using {} with status code {}: {}", url, status.getStatusCode(), jsonStr);
JsonElement jsonRet = GSON.fromJson(jsonStr, JsonArray.class);
throw new RestApiProcessingException(getFirstErrorMessage("Failed to retrieve response from ", jsonRet));
}
|
Log json response for connector request failure (#<I>)
|
apache_incubator-gobblin
|
train
|
java
|
1099639801bb986f3c25c12fd5e927d1ccff4621
|
diff --git a/src/legend.js b/src/legend.js
index <HASH>..<HASH> 100644
--- a/src/legend.js
+++ b/src/legend.js
@@ -96,13 +96,21 @@ d3_calcType: function (scale, ascending, cells, labels, labelFormat, labelDelimi
type.labels = this.d3_mergeLabels(type.labels, labels);
if (ascending) {
- type.labels.reverse();
- type.data.reverse();
+ type.labels = this.d3_reverse(type.labels);
+ type.data = this.d3_reverse(type.data);
}
return type;
},
+d3_reverse: function(arr) {
+ var mirror = [];
+ for (var i = 0, l = arr.length; i < l; i++) {
+ mirror[i] = arr[l-i-1];
+ }
+ return mirror;
+},
+
d3_placement: function (orient, cell, cellTrans, text, textTrans, labelAlign) {
cell.attr("transform", cellTrans);
text.attr("transform", textTrans);
|
Do not affect actual range when using ascending option
`type.data.reverse` has the unfortunate side-effect of reversing the
original scale range passed as parameter.
With two charts sharing the same scale, using the `ascending` option on
one of them was affecting the other.
|
susielu_d3-legend
|
train
|
js
|
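The d3-legend fix matters because `Array.prototype.reverse` mutates the array it is called on, so reversing the labels also flipped the scale range shared with other charts. Python's `list.reverse()` has the same trap; the copy-based alternatives look like this:

shared_range = [0, 25, 50, 75, 100]

mutated = list(shared_range)
mutated.reverse()                        # in place: safe only because we copied first

ascending = shared_range[::-1]           # slice copy, original untouched
also_ascending = list(reversed(shared_range))

assert shared_range == [0, 25, 50, 75, 100]
assert ascending == also_ascending == [100, 75, 50, 25, 0]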
1d19dec7d2da514e0343e28273eafdb97f3a40a3
|
diff --git a/zookeeper/src/test/java/com/linecorp/armeria/server/zookeeper/ZooKeeperRegistrationTest.java b/zookeeper/src/test/java/com/linecorp/armeria/server/zookeeper/ZooKeeperRegistrationTest.java
index <HASH>..<HASH> 100644
--- a/zookeeper/src/test/java/com/linecorp/armeria/server/zookeeper/ZooKeeperRegistrationTest.java
+++ b/zookeeper/src/test/java/com/linecorp/armeria/server/zookeeper/ZooKeeperRegistrationTest.java
@@ -27,6 +27,7 @@ import org.apache.zookeeper.ZooKeeper;
import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
import com.linecorp.armeria.client.Endpoint;
@@ -117,6 +118,7 @@ public class ZooKeeperRegistrationTest extends TestBase implements ZooKeeperAsse
}
}
+ @Ignore // FIXME: https://github.com/line/armeria/issues/477
@Test
public void testConnectionRecovery() throws Exception {
ZooKeeperRegistration zkConnector = zkConnectors.get(0);
|
Ignore flaky zookeeper test for now. (#<I>)
|
line_armeria
|
train
|
java
|
6d5c8cc95f362a346d8008d9d9f9a38d8225ddee
|
diff --git a/code/RedirectedURLHandler.php b/code/RedirectedURLHandler.php
index <HASH>..<HASH> 100644
--- a/code/RedirectedURLHandler.php
+++ b/code/RedirectedURLHandler.php
@@ -31,7 +31,7 @@ class RedirectedURLHandler extends Extension {
/**
* @throws SS_HTTPResponse_Exception
*/
- protected function onBeforeHTTPError404($request) {
+ public function onBeforeHTTPError404($request) {
$base = strtolower($request->getURL());
$getVars = $this->arrayToLowercase($request->getVars());
|
BUG onBeforeHTTPError<I> must be public to be invokable
Fixing an error where framework/core/Object.php#<I> was falling over trying to call onBeforeHTTPError<I>() which was protected, causing the whole module to fail.
|
silverstripe_silverstripe-redirectedurls
|
train
|
php
|
4852eaf850cfa192ff3d20b1611773d9c4be6c33
|
diff --git a/src/effector/stdlib/typedef.js b/src/effector/stdlib/typedef.js
index <HASH>..<HASH> 100644
--- a/src/effector/stdlib/typedef.js
+++ b/src/effector/stdlib/typedef.js
@@ -33,18 +33,22 @@ export const step: {|
|}): Emit,
compute(data: {|
fn: (data: any, scope: {[string]: any, ...}) => any,
+ fail?: (data: mixed, scope: {[string]: any, ...}) => any,
meta?: NodeMeta,
|}): Compute,
filter(data: {|
fn: (data: any, scope: {[string]: any, ...}) => any,
+ fail?: (data: mixed, scope: {[string]: any, ...}) => any,
meta?: NodeMeta,
|}): Filter,
run(data: {
fn: (data: any, scope: {[string]: any, ...}) => any,
+ fail?: (data: mixed, scope: {[string]: any, ...}) => any,
meta?: NodeMeta,
}): Run,
tap(data: {
fn: (data: any, scope: {[string]: any, ...}) => any,
+ fail?: (data: mixed, scope: {[string]: any, ...}) => any,
meta?: NodeMeta,
}): Tap,
update(data: {|
|
Add fail handler to typedef
|
zerobias_effector
|
train
|
js
|
9e177be560a05e20dd7d041f3e685d76f8ab7b59
|
diff --git a/debug/handler/debug.go b/debug/handler/debug.go
index <HASH>..<HASH> 100644
--- a/debug/handler/debug.go
+++ b/debug/handler/debug.go
@@ -5,6 +5,8 @@ import (
"runtime"
"time"
+ "github.com/micro/go-micro/debug/log"
+
proto "github.com/micro/go-micro/debug/proto"
)
@@ -15,6 +17,7 @@ var (
type Debug struct {
started int64
+ log.Logger
}
func newDebug() *Debug {
diff --git a/debug/log/default.go b/debug/log/default.go
index <HASH>..<HASH> 100644
--- a/debug/log/default.go
+++ b/debug/log/default.go
@@ -40,7 +40,12 @@ func (l *defaultLogger) Write(v ...interface{}) {
// Read reads logs from the logger
func (l *defaultLogger) Read(n int) []interface{} {
- return l.Get(n)
+ entries := l.Get(n)
+ vals := make([]interface{}, 0, len(entries))
+ for _, val := range entries {
+ vals = append(vals, val)
+ }
+ return vals
}
func (l *defaultLogger) log(entry string) {
|
Embed logger into debug.Handler
|
micro_go-micro
|
train
|
go,go
|
27a2d7d9ed08932a64961500acf3f28ff976eb6b
|
diff --git a/bin/cmd.js b/bin/cmd.js
index <HASH>..<HASH> 100755
--- a/bin/cmd.js
+++ b/bin/cmd.js
@@ -483,7 +483,10 @@ function drawTorrent (torrent) {
'{green:' + (seeding ? 'seeding' : 'downloading') + ':} ' +
'{bold:' + torrent.name + '}'
)
- if (seeding) clivas.line('{green:magnet uri:} ' + torrent.magnetURI)
+ if (seeding) {
+ clivas.line('{green:magnet uri:} ' + torrent.magnetURI)
+ clivas.line('{green:info hash:} ' + torrent.infoHash)
+ }
clivas.line(
'{green:speed: }{bold:' + prettyBytes(speed) + '/s} ' +
'{green:downloaded:} {bold:' + prettyBytes(torrent.swarm.downloaded) + '}' +
|
Display info hash on commandline for seed option
Fix webtorrent seed doesn't show info hash
Fixes #<I>
|
webtorrent_webtorrent
|
train
|
js
|
4987ceff8d1a8f5c017b7b209f9920a55c569d9f
|
diff --git a/src/Applications/Controller/ApplyController.php b/src/Applications/Controller/ApplyController.php
index <HASH>..<HASH> 100644
--- a/src/Applications/Controller/ApplyController.php
+++ b/src/Applications/Controller/ApplyController.php
@@ -20,6 +20,7 @@ use Core\Form\Container;
use Core\Form\SummaryForm;
use Core\Entity\PermissionsInterface;
use Applications\Entity\Status;
+use Applications\Entity\StatusInterface;
/**
*
|
[Applications] fixes: StatusInterface is missing in ApplyController.
|
yawik_applications
|
train
|
php
|
214cbea6f904d1f3fd0b8094f78339d418ff7aa2
|
diff --git a/cptv/writer.py b/cptv/writer.py
index <HASH>..<HASH> 100644
--- a/cptv/writer.py
+++ b/cptv/writer.py
@@ -58,7 +58,9 @@ class CPTVWriter:
self.timestamp = datetime.now()
mtime = self.timestamp.timestamp()
- self.s = gzip.GzipFile(fileobj=self.fileobj, mode="wb", mtime=mtime)
+ self.s = gzip.GzipFile(
+ fileobj=self.fileobj, mode="wb", mtime=mtime, compresslevel=1
+ )
self.comp = Compressor()
self.s.write(MAGIC)
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ Cacophony Project Thermal Video (CPTV) files. It works with Python
setup(
name="cptv",
- version="1.5.2",
+ version="1.5.3",
description="Python library for handling Cacophony Project Thermal Video (CPTV) files",
long_description=long_description,
url="https://github.com/TheCacophonyProject/python-cptv",
|
lower compression level to 1 (#<I>)
* lower compression level to 1
* up version
|
TheCacophonyProject_python-cptv
|
train
|
py,py
|
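Dropping to `compresslevel=1` trades output size for write speed, which is a sensible default when frames are compressed in real time. A self-contained comparison using only the standard library (exact sizes will vary with the input):

import gzip
import io

payload = b"thermal frame " * 10_000

def gzip_size(level):
    buf = io.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode="wb", compresslevel=level) as gz:
        gz.write(payload)
    return len(buf.getvalue())

print("level 1:", gzip_size(1), "bytes")  # fastest, largest
print("level 9:", gzip_size(9), "bytes")  # slowest, smallest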
77b1babf4591391d529dbcfb40f00a96319b9aa5
|
diff --git a/src/com/google/javascript/jscomp/debugger/CompilationParam.java b/src/com/google/javascript/jscomp/debugger/CompilationParam.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/debugger/CompilationParam.java
+++ b/src/com/google/javascript/jscomp/debugger/CompilationParam.java
@@ -19,6 +19,7 @@ package com.google.javascript.jscomp.debugger;
import com.google.javascript.jscomp.AnonymousFunctionNamingPolicy;
import com.google.javascript.jscomp.CheckLevel;
import com.google.javascript.jscomp.CompilerOptions;
+import com.google.javascript.jscomp.CompilerOptions.J2clPassMode;
import com.google.javascript.jscomp.CompilerOptions.Reach;
import com.google.javascript.jscomp.DiagnosticGroup;
import com.google.javascript.jscomp.DiagnosticGroups;
@@ -740,6 +741,13 @@ enum CompilationParam {
}
},
+ J2CL_PASS {
+ @Override
+ void apply(CompilerOptions options, boolean value) {
+ options.setJ2clPass(value ? J2clPassMode.ON : J2clPassMode.OFF);
+ }
+ },
+
//--------------------------------
// Output options
//--------------------------------
|
Add J2CL_PASS to the debugger options.
-------------
Created by MOE: <URL>
|
google_closure-compiler
|
train
|
java
|
410ad0cf6a884e60bdf4d214ab695555dbe55292
|
diff --git a/lxd/device/disk.go b/lxd/device/disk.go
index <HASH>..<HASH> 100644
--- a/lxd/device/disk.go
+++ b/lxd/device/disk.go
@@ -178,6 +178,17 @@ func (d *disk) validateConfig(instConf instance.ConfigReader) error {
}
if d.config["pool"] != "" {
+ if d.inst != nil && !d.inst.IsSnapshot() {
+ _, pool, _, err := d.state.Cluster.GetStoragePoolInAnyState(d.config["pool"])
+ if err != nil {
+ return fmt.Errorf("Failed to get storage pool %q: %s", d.config["pool"], err)
+ }
+
+ if pool.Status == "Pending" {
+ return fmt.Errorf("Pool %q is pending", d.config["pool"])
+ }
+ }
+
if d.config["shift"] != "" {
return fmt.Errorf(`The "shift" property cannot be used with custom storage volumes`)
}
|
lxd/device/disk: Validate that the pool is not pending
|
lxc_lxd
|
train
|
go
|
0a6db6b194745ab2269276770eec11723b845f64
|
diff --git a/test/e2e_node/dynamic_kubelet_config_test.go b/test/e2e_node/dynamic_kubelet_config_test.go
index <HASH>..<HASH> 100644
--- a/test/e2e_node/dynamic_kubelet_config_test.go
+++ b/test/e2e_node/dynamic_kubelet_config_test.go
@@ -69,7 +69,7 @@ type nodeConfigTestCase struct {
}
// This test is marked [Disruptive] because the Kubelet restarts several times during this test.
-var _ = framework.KubeDescribe("[Feature:DynamicKubeletConfig][NodeAlphaFeature:DynamicKubeletConfig][Serial] [Disruptive]", func() {
+var _ = framework.KubeDescribe("[Feature:DynamicKubeletConfig][NodeFeature:DynamicKubeletConfig][Serial][Disruptive]", func() {
f := framework.NewDefaultFramework("dynamic-kubelet-configuration-test")
var beforeNode *apiv1.Node
var beforeConfigMap *apiv1.ConfigMap
|
Fix test tag on dynamic config tests
The test accidentally got turned off when the NodeAlphaFeature tag was
added in #<I>. This PR updates the tag to turn it back on.
|
kubernetes_kubernetes
|
train
|
go
|
6cab6071e55c2f88276c26c5e76414b780fce587
|
diff --git a/phypno/viz/plot_1d.py b/phypno/viz/plot_1d.py
index <HASH>..<HASH> 100644
--- a/phypno/viz/plot_1d.py
+++ b/phypno/viz/plot_1d.py
@@ -1,23 +1,16 @@
-import pyqtgraph as pg
-
-
-win = pg.GraphicsWindow(title="Basic plotting examples")
-win.resize(1000,600)
-win.setWindowTitle('pyqtgraph example: Plotting')
-
-# Enable antialiasing for prettier plots
-pg.setConfigOptions(antialias=True)
-
-p2 = win.addPlot(title="Multiple curves")
+from pyqtgraph import GraphicsWindow
def plot_data(data, xaxis='time', xlog=False, ylog=False):
"""Plot data in 2d.
"""
-
-
+ win = GraphicsWindow(title="plot data")
xval = getattr(data, xaxis)
+
for i_ch in range(len(data.chan_name)):
- p2.plot(xval, data.data[i_ch, :])
+ p = win.addPlot(title=data.chan_name[i_ch])
+ p.plot(xval, data.data[i_ch, :])
+ win.nextRow()
+ return win # avoid garbage-collection
|
pyqtgraph works well in ipython with gui qt
|
wonambi-python_wonambi
|
train
|
py
|