hash
stringlengths 40
40
| diff
stringlengths 131
26.7k
| message
stringlengths 7
694
| project
stringlengths 5
67
| split
stringclasses 1
value | diff_languages
stringlengths 2
24
|
---|---|---|---|---|---|
f5e79bc5e1dd41d439c054ba423501e87ada1d08 | diff --git a/safe_qgis/tools/options_dialog.py b/safe_qgis/tools/options_dialog.py
index <HASH>..<HASH> 100644
--- a/safe_qgis/tools/options_dialog.py
+++ b/safe_qgis/tools/options_dialog.py
@@ -123,7 +123,7 @@ class OptionsDialog(QtGui.QDialog, Ui_OptionsDialogBase):
ratio = float(settings.value(
'inasafe/defaultFemaleRatio',
- DEFAULTS['FEM_RATIO'], type=float))
+ DEFAULTS['FEMALE_RATIO'], type=float))
self.dsbFemaleRatioDefault.setValue(ratio)
path = settings.value( | Fix option dialog can't be opened. | inasafe_inasafe | train | py |
8468a05b87931d716bcc8c8cd381742de9fe2770 | diff --git a/extensions/tags/src/Listener/SaveTagsToDatabase.php b/extensions/tags/src/Listener/SaveTagsToDatabase.php
index <HASH>..<HASH> 100755
--- a/extensions/tags/src/Listener/SaveTagsToDatabase.php
+++ b/extensions/tags/src/Listener/SaveTagsToDatabase.php
@@ -118,6 +118,7 @@ class SaveTagsToDatabase
$discussion->afterSave(function ($discussion) use ($newTagIds) {
$discussion->tags()->sync($newTagIds);
+ $discussion->unsetRelation('tags');
});
}
} | Unset tags relation after setting (#<I>)
This ensures that the proper tag values are returned to the API by clearing any cached tags before returning a response. It also makes sure that the listeners to the `DiscussionWasTagged` event won't have old data under `$event->discussion->tags`.
Fixes <URL> | flarum_core | train | php |
306ba81c376513803509ad7117229c763261a794 | diff --git a/lib/opal/cli_runners/chrome.js b/lib/opal/cli_runners/chrome.js
index <HASH>..<HASH> 100644
--- a/lib/opal/cli_runners/chrome.js
+++ b/lib/opal/cli_runners/chrome.js
@@ -12,6 +12,7 @@ fs.writeFileSync("/tmp/chrome-opal.js", opal_code);
fs.writeFileSync("/tmp/chrome-opal.html", "" +
"<html>" +
"<head>" +
+ "<meta charset='utf-8'>" +
"<script src='chrome-opal.js'></script>" +
"</head>" +
"<body>" + | Add charset meta tag to headless chrome runner | opal_opal | train | js |
60c80f5fe5c83cf0ac45414d6e56c86fe3b9a0a6 | diff --git a/vmware/tags.go b/vmware/tags.go
index <HASH>..<HASH> 100644
--- a/vmware/tags.go
+++ b/vmware/tags.go
@@ -22,9 +22,12 @@ type Tag struct {
}
func LoadTags() (Tags, error) {
- f, e := os.Open(tagsPath)
- if e != nil {
- return nil, e
+ f, err := os.Open(tagsPath)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, err
}
defer f.Close()
var tags Tags | vmware/vm: ignore errors if tags file does not exist | dynport_dgtk | train | go |
90904a80a8b8d19c4a1c889fb8e7881b1aeebb15 | diff --git a/config/admin/admin.go b/config/admin/admin.go
index <HASH>..<HASH> 100644
--- a/config/admin/admin.go
+++ b/config/admin/admin.go
@@ -171,6 +171,17 @@ func init() {
product.UseTheme("grid")
variationsResource := product.Meta(&admin.Meta{Name: "Variations", Config: &variations.VariationsConfig{}}).Resource
+ if imagesMeta := variationsResource.GetMeta("Images"); imagesMeta != nil {
+ imagesMeta.Config = &media_library.MediaBoxConfig{
+ RemoteDataResource: ProductImagesResource,
+ Sizes: map[string]*media_library.Size{
+ "icon": {Width: 50, Height: 50},
+ "thumb": {Width: 100, Height: 100},
+ "display": {Width: 300, Height: 300},
+ },
+ }
+ }
+
variationsResource.EditAttrs("-ID", "-Product")
product.SearchAttrs("Name", "Code", "Category.Name", "Brand.Name") | Add sizes to variations meta (need to refactor later) | qor_qor-example | train | go |
a93ce1bb2e3e9d42b94205d8572a51b4af6c1951 | diff --git a/arcrest/ago.py b/arcrest/ago.py
index <HASH>..<HASH> 100644
--- a/arcrest/ago.py
+++ b/arcrest/ago.py
@@ -41,3 +41,12 @@ class AGORoot(server.RestURL):
@property
def portals(self):
return self._get_subfolder("./portals/", Portals)
+
+class Community(server.RestURL):
+ pass
+
+class Content(server.RestURL):
+ pass
+
+class Portals(server.RestURL):
+ pass | Stubs for ArcGIS Online APIs | jasonbot_arcrest | train | py |
7e129b2185db64ce9de5f24f4df5a5908a0045eb | diff --git a/crossplane/__init__.py b/crossplane/__init__.py
index <HASH>..<HASH> 100644
--- a/crossplane/__init__.py
+++ b/crossplane/__init__.py
@@ -8,7 +8,7 @@ __title__ = 'crossplane'
__summary__ = 'Reliable and fast NGINX configuration file parser.'
__url__ = 'https://github.com/nginxinc/crossplane'
-__version__ = '0.1.2'
+__version__ = '0.1.3'
__author__ = 'Arie van Luttikhuizen'
__email__ = '[email protected]' | Updated version to <I> | nginxinc_crossplane | train | py |
829550c81fb0fc43c79e9f15f378610ad400529e | diff --git a/src/Titon/Db/Entity.php b/src/Titon/Db/Entity.php
index <HASH>..<HASH> 100644
--- a/src/Titon/Db/Entity.php
+++ b/src/Titon/Db/Entity.php
@@ -97,7 +97,14 @@ class Entity implements Serializable, JsonSerializable, Iterator, ArrayAccess, C
* @return array
*/
public function toArray() {
- return $this->_toArray($this);
+ $data = [];
+
+ // Loop and trigger any closures
+ foreach ($this->keys() as $key) {
+ $data[$key] = $this->get($key);
+ }
+
+ return $this->_toArray($data);
}
/** | Fix Entity::toArray() not executing closures | titon_db | train | php |
a7359f0b46219b82194cf407a6ac6debceec408e | diff --git a/environs/cloudinit.go b/environs/cloudinit.go
index <HASH>..<HASH> 100644
--- a/environs/cloudinit.go
+++ b/environs/cloudinit.go
@@ -17,7 +17,6 @@ import (
// NewMachineConfig sets up a basic machine configuration. You'll still need
// to supply more information, but this takes care of the fixed entries and
// the ones that are always needed.
-// TODO(bug 1199847): This work can be shared between providers.
func NewMachineConfig(machineID, machineNonce string,
stateInfo *state.Info, apiInfo *api.Info) *cloudinit.MachineConfig {
return &cloudinit.MachineConfig{ | Satisfying that TODO. | juju_juju | train | go |
2d695b3c83e23a458421691f411ac0aa183fddf6 | diff --git a/fakeKeeper.js b/fakeKeeper.js
index <HASH>..<HASH> 100644
--- a/fakeKeeper.js
+++ b/fakeKeeper.js
@@ -23,7 +23,7 @@ function keeperForMap (map) {
getAll: function () {
return Q.resolve(values(map))
},
- close: function () {
+ destroy: function () {
return Q.resolve()
},
isKeeper: function () { | add keeper.destroy() to fakeKeeper | tradle_test-helpers | train | js |
42ad1ac59c048425f53c040b2d91e213ac37bdac | diff --git a/datatableview/static/js/datatableview.js b/datatableview/static/js/datatableview.js
index <HASH>..<HASH> 100644
--- a/datatableview/static/js/datatableview.js
+++ b/datatableview/static/js/datatableview.js
@@ -149,7 +149,7 @@ var datatableview = {
search_input.after(clear_button).after(' ');
datatable.DataTable(options);
});
- return $(initialized_datatables).dataTable();
+ return $$;
}
} | Return own collection
New DataTable apis don't work well with this kind of collection | pivotal-energy-solutions_django-datatable-view | train | js |
9fb32a27fea416b077f47e154d17726cc28727fc | diff --git a/frasco_upload/__init__.py b/frasco_upload/__init__.py
index <HASH>..<HASH> 100644
--- a/frasco_upload/__init__.py
+++ b/frasco_upload/__init__.py
@@ -103,7 +103,7 @@ class UploadFeature(Feature):
@action(default_option='file')
def save_uploaded_file_temporarly(self, file, filename=None):
if filename:
- tmpfilename = os.path.join(self.options['upload_tmp_dir'] or gettempdir(), filename)
+ tmpfilename = os.path.join(self.options['upload_tmp_dir'] or gettempdir(), filename.replace('/', '-'))
else:
tmp = NamedTemporaryFile(delete=False, dir=self.options['upload_tmp_dir'])
tmp.close()
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
setup(
name='frasco-upload',
- version='0.3',
+ version='0.3.1',
url='http://github.com/frascoweb/frasco-upload',
license='MIT',
author='Maxime Bouroumeau-Fuseau', | don't use subdirs in save_uploaded_file_temporarly() | frascoweb_frasco-upload | train | py,py |
a447aecd953a7a4ab469777b237624cca02ebbb3 | diff --git a/public/js/app.js b/public/js/app.js
index <HASH>..<HASH> 100644
--- a/public/js/app.js
+++ b/public/js/app.js
@@ -640,7 +640,7 @@ function initIssue() {
var clickedButton = undefined;
- $("button,input[type=\"submit\"]", fileInput.form).on("click", function() {
+ $('#issue-reply-btn,input[type="submit"]', fileInput.form).on('click', function() {
clickedButton = this;
var $button = $(this); | Fix for erroneous Submitting mode on button
Click handler was also attaching to “Select Attachments” button, so
modified selector to grab #issue-reply-btn by ID and submit button | gogs_gogs | train | js |
1362ca631c1fbf1e1436d75b3b112ddedc39480a | diff --git a/packages/heroku-spaces/commands/create.js b/packages/heroku-spaces/commands/create.js
index <HASH>..<HASH> 100644
--- a/packages/heroku-spaces/commands/create.js
+++ b/packages/heroku-spaces/commands/create.js
@@ -54,7 +54,7 @@ Example:
{name: 'space', char: 's', hasValue: true, description: 'name of space to create'},
{name: 'channel', hasValue: true, hidden: true},
{name: 'region', hasValue: true, description: 'region name'},
- {name: 'features', hasValue: true, hidden: 'true', description: 'a list of features separated by commas'},
+ {name: 'features', hasValue: true, hidden: true, description: 'a list of features separated by commas'},
{name: 'log-drain-url', hasValue: true, hidden: true, description: 'direct log drain url'}
],
run: cli.command(co.wrap(run)) | misc: This shouldn't be a string | heroku_cli | train | js |
94ce1ef1e627c5ac12ab3988f38496765f6bf131 | diff --git a/shakedown/dcos/__init__.py b/shakedown/dcos/__init__.py
index <HASH>..<HASH> 100644
--- a/shakedown/dcos/__init__.py
+++ b/shakedown/dcos/__init__.py
@@ -1,6 +1,7 @@
import os
import dcos
import dcos.cluster
+import sys
import shakedown
@@ -9,7 +10,8 @@ def attach_cluster(url):
"""Attach to an already set-up cluster
:return: True if successful, else False
"""
- clusters = [c.dict() for c in dcos.cluster.get_clusters()]
+ with shakedown.stdchannel_redirected(sys.stderr, os.devnull):
+ clusters = [c.dict() for c in dcos.cluster.get_clusters()]
for c in clusters:
if url == c['url']:
try: | Don't output exception if trying to attach
except silently and continue attachment attempts if a previously set-up
cluster is no longer reachable. | dcos_shakedown | train | py |
cb692b4000d8b195bac3030dac3ed1725b1e5c11 | diff --git a/django_js_reverse/core.py b/django_js_reverse/core.py
index <HASH>..<HASH> 100755
--- a/django_js_reverse/core.py
+++ b/django_js_reverse/core.py
@@ -2,7 +2,7 @@
import json
import re
import sys
-from distutils.version import StrictVersion
+from distutils.version import LooseVersion
import django
from django.conf import settings
@@ -90,7 +90,7 @@ def prepare_url_list(urlresolver, namespace_path='', namespace=''):
args = [inner_ns_path, inner_urlresolver]
# https://github.com/ierror/django-js-reverse/issues/65
- if StrictVersion(django.get_version()) >= StrictVersion("2.0.6"):
+ if LooseVersion(django.get_version()) >= LooseVersion("2.0.6"):
args.append(tuple(urlresolver.pattern.converters.items()))
inner_urlresolver = urlresolvers.get_ns_resolver(*args) | Now using LooseVersion instead of StrictVersion to avoid issues with rc releases. | ierror_django-js-reverse | train | py |
d35e490a52e365c9b74af7dc435fe62e7681d566 | diff --git a/app/models/scaptimony/scap_content.rb b/app/models/scaptimony/scap_content.rb
index <HASH>..<HASH> 100644
--- a/app/models/scaptimony/scap_content.rb
+++ b/app/models/scaptimony/scap_content.rb
@@ -6,7 +6,12 @@ require 'scaptimony/engine'
module Scaptimony
class DataStreamValidator < ActiveModel::Validator
def validate(scap_content)
- if scap_content.new_record? and scap_content.scap_file.nil?
+ if !scap_content.new_record?
+ return true if scap_content.scap_file.nil?
+ scap_content.errors[:base] << _("Cannot change uploaded file while editing content.")
+ return false
+ end
+ if scap_content.scap_file.nil?
scap_content.errors[:base] << _("Please select file for upload.")
return false
end | Error message for users uploading while editing. | OpenSCAP_scaptimony | train | rb |
d6a7b65cbed4412ec891cf1f718eff21f9a1ed8d | diff --git a/test/unit/lookups/yandex_test.rb b/test/unit/lookups/yandex_test.rb
index <HASH>..<HASH> 100644
--- a/test/unit/lookups/yandex_test.rb
+++ b/test/unit/lookups/yandex_test.rb
@@ -21,7 +21,7 @@ class YandexTest < GeocoderTestCase
end
def test_yandex_query_url_contains_bbox
- lookup = Geocoder::Lookup::Google.new
+ lookup = Geocoder::Lookup::Yandex.new
url = lookup.query_url(Geocoder::Query.new(
"Some Intersection",
:bounds => [[40.0, -120.0], [39.0, -121.0]] | Fix: incorrect lookup used in test. | alexreisner_geocoder | train | rb |
c536c09a44cb281b6c88e0241824a0c703d22a40 | diff --git a/provision/juju/healer.go b/provision/juju/healer.go
index <HASH>..<HASH> 100644
--- a/provision/juju/healer.go
+++ b/provision/juju/healer.go
@@ -185,6 +185,7 @@ func (h instanceAgentsConfigHealer) Heal() error {
if err != nil {
return err
}
+ defer conn.Close()
var apps []app.App
err = conn.Apps().Find(nil).All(&apps)
if err != nil {
@@ -224,6 +225,7 @@ func (h instanceUnitHealer) Heal() error {
if err != nil {
return err
}
+ defer conn.Close()
var apps []app.App
err = conn.Apps().Find(nil).All(&apps)
if err != nil {
@@ -412,6 +414,7 @@ func (h elbInstanceHealer) getUnhealthyApps() map[string]app.App {
if err != nil {
return nil
}
+ defer conn.Close()
var all []app.App
apps := make(map[string]app.App)
s := map[string]interface{}{"name": 1, "units": 1} | provision/juju: close database connections after using them
Related to #<I>. | tsuru_tsuru | train | go |
d11a9ea126975f415e7adc78d9cf942b89408f1c | diff --git a/uncompyle6/parsers/parse2.py b/uncompyle6/parsers/parse2.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/parsers/parse2.py
+++ b/uncompyle6/parsers/parse2.py
@@ -664,7 +664,7 @@ class Python2Parser(PythonParser):
elif lhs == "assert_expr_and":
jmp_false = ast[1]
jump_target = jmp_false[0].attr
- return jump_target > tokens[last].offset
+ return jump_target > tokens[last].off2int()
elif lhs in ("raise_stmt1",):
# We will assume 'LOAD_ASSERT' will be handled by an assert grammar rule
return tokens[first] == "LOAD_ASSERT" and (last >= len(tokens)) | Remember rocky: use off2int() in offset testing! | rocky_python-uncompyle6 | train | py |
c6fe7d6a558624dfa6567f3c4d62e6615dcfca24 | diff --git a/asd/openvpn_manage.py b/asd/openvpn_manage.py
index <HASH>..<HASH> 100644
--- a/asd/openvpn_manage.py
+++ b/asd/openvpn_manage.py
@@ -108,10 +108,10 @@ class OpenVPNManager(ToolBase):
return missing_files
def __generate_client_keys(self, name):
- command = ['bash', '-c', 'source vars && ./pkitool {}'.format(args.username)]
+ command = ['bash', '-c', 'source vars && ./pkitool {}'.format(name)]
try:
- logger.info(check_output(command, stdout = PIPE, stderr = PIPE))
+ logger.info(check_output(command, stderr = PIPE))
return True
except CalledProcessError as e:
logger.exception(e)
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
setup(
name = "asd",
description = "Various command line helper scripts`",
- version = "1.3.0",
+ version = "1.3.1",
author = 'Lajos Santa',
author_email = '[email protected]',
url = 'https://github.com/voidpp/asd.git', | Openvpn: keys were not generated due typo | voidpp_asd | train | py,py |
2067525b65c0d507f8d3fa96c1d95175162fd192 | diff --git a/lib/sufia/version.rb b/lib/sufia/version.rb
index <HASH>..<HASH> 100644
--- a/lib/sufia/version.rb
+++ b/lib/sufia/version.rb
@@ -1,3 +1,3 @@
module Sufia
- VERSION = "4.1.0"
+ VERSION = "4.2.0"
end
diff --git a/sufia-models/lib/sufia/models/version.rb b/sufia-models/lib/sufia/models/version.rb
index <HASH>..<HASH> 100644
--- a/sufia-models/lib/sufia/models/version.rb
+++ b/sufia-models/lib/sufia/models/version.rb
@@ -1,5 +1,5 @@
module Sufia
module Models
- VERSION = "4.1.0"
+ VERSION = "4.2.0"
end
end | Preparing for <I> release | samvera_hyrax | train | rb,rb |
6c82cdc20d6f81c96772da73fc07a672a0a0a6ef | diff --git a/plotnine/stats/stat_bin_2d.py b/plotnine/stats/stat_bin_2d.py
index <HASH>..<HASH> 100644
--- a/plotnine/stats/stat_bin_2d.py
+++ b/plotnine/stats/stat_bin_2d.py
@@ -45,6 +45,10 @@ class stat_bin_2d(stat):
::
+ 'xmin' # x lower bound for the bin
+ 'xmax' # x upper bound for the bin
+ 'ymin' # y lower bound for the bin
+ 'ymax' # y upper bound for the bin
'count' # number of points in bin
'density' # density of points in bin, scaled to integrate to 1 | DOC: xmin, ... computed aesthetics for stat_bin_2d | has2k1_plotnine | train | py |
782e728f10357cb0b8252e433078888c7f2a64e4 | diff --git a/src/Plinth/Main.php b/src/Plinth/Main.php
index <HASH>..<HASH> 100644
--- a/src/Plinth/Main.php
+++ b/src/Plinth/Main.php
@@ -128,6 +128,7 @@ class Main {
'templatebase' => 'base',
'templatepath' => __TEMPLATE,
'assetpath' => false,
+ 'route403' => false,
'route404' => false,
'route405' => false
);
diff --git a/src/Plinth/Response/Response.php b/src/Plinth/Response/Response.php
index <HASH>..<HASH> 100644
--- a/src/Plinth/Response/Response.php
+++ b/src/Plinth/Response/Response.php
@@ -187,6 +187,7 @@ class Response extends Connector {
$exitRoute = false;
switch ($code) {
+ case self::CODE_403: $exitRoute = $this->Main()->getSetting('route403'); break;
case self::CODE_404: $exitRoute = $this->Main()->getSetting('route404'); break;
case self::CODE_405: $exitRoute = $this->Main()->getSetting('route405'); break;
} | Handle <I>'s more direct | Warsaalk_Plinth | train | php,php |
157547a51fd9a77ba929fca4631a3f52b124cb5a | diff --git a/class.phpmailer.php b/class.phpmailer.php
index <HASH>..<HASH> 100644
--- a/class.phpmailer.php
+++ b/class.phpmailer.php
@@ -2910,11 +2910,7 @@ class PHPMailer
}
$cid = md5($url) . '@phpmailer.0'; // RFC2392 S 2
if ($this->addStringEmbeddedImage($data, $cid, '', 'base64', $match[1])) {
- $message = preg_replace(
- '/' . $images[1][$imgindex] . '=["\']' . preg_quote($url, '/') . '["\']/Ui',
- $images[1][$imgindex] . '="cid:' . $cid . '"',
- $message
- );
+ $message = str_replace($images[0][$imgindex], $images[1][$imgindex] . '="cid:' . $cid . '"', $message);
}
} elseif (!preg_match('#^[A-z]+://#', $url)) {
// Do not change urls for absolute images (thanks to corvuscorax) | Fixing an issue with embedded images
Fixed the issue with html parsing when there was an embedded base<I>
encoded image in the source - which resulted in regexp errors (too long
regexp). | PHPMailer_PHPMailer | train | php |
7f9904deff6fcf2d9f2c71c543487c3f36e64a66 | diff --git a/lib/countries/country.rb b/lib/countries/country.rb
index <HASH>..<HASH> 100644
--- a/lib/countries/country.rb
+++ b/lib/countries/country.rb
@@ -112,7 +112,7 @@ class ISO3166::Country
def all(&blk)
blk ||= proc { |country, data| [data['name'], country] }
- Data.map &blk
+ Data.map(&blk)
end
alias_method :countries, :all | Add explicit parens to drop the Ruby warning | hexorx_countries | train | rb |
ce1a5b7d7ad02c581235b381a19b48785e3ee800 | diff --git a/post-processor/compress/post-processor.go b/post-processor/compress/post-processor.go
index <HASH>..<HASH> 100644
--- a/post-processor/compress/post-processor.go
+++ b/post-processor/compress/post-processor.go
@@ -144,8 +144,8 @@ func (p *PostProcessor) PostProcess(ui packer.Ui, artifact packer.Artifact) (pac
output, err = makeLZ4Writer(outputFile, p.config.CompressionLevel)
defer output.Close()
case "xz":
- ui.Say(fmt.Sprintf("Using xz compression with %d cores for %s",
- runtime.GOMAXPROCS(-1), target))
+ ui.Say(fmt.Sprintf("Using xz compression with 1 core for %s (library does not support MT)",
+ target))
output, err = makeXZWriter(outputFile, p.config.CompressionLevel)
defer output.Close()
case "pgzip": | Print why we are only using one core | hashicorp_packer | train | go |
ed3bd1974b5b8abb8d8a9ede47df5d5dc2dc2a87 | diff --git a/mod/wiki/backuplib.php b/mod/wiki/backuplib.php
index <HASH>..<HASH> 100644
--- a/mod/wiki/backuplib.php
+++ b/mod/wiki/backuplib.php
@@ -55,7 +55,7 @@
fwrite ($bf,full_tag("MODTYPE",4,false,"wiki"));
fwrite ($bf,full_tag("NAME",4,false,$wiki->name));
fwrite ($bf,full_tag("SUMMARY",4,false,$wiki->summary));
- fwrite ($bf,full_tag("PAGENAME",4,false,$wiki->wtype));
+ fwrite ($bf,full_tag("PAGENAME",4,false,$wiki->pagename));
fwrite ($bf,full_tag("WTYPE",4,false,$wiki->wtype));
fwrite ($bf,full_tag("EWIKIPRINTTITLE",4,false,$wiki->ewikiprinttitle));
fwrite ($bf,full_tag("HTMLMODE",4,false,$wiki->htmlmode)); | Fixed old-glorious typo in wiki backup. MDL-<I> ; merged from <I>_STABLE | moodle_moodle | train | php |
3947637cfcff5fd81dfe39b8f27f871648845819 | diff --git a/code/MemberProfilePage.php b/code/MemberProfilePage.php
index <HASH>..<HASH> 100644
--- a/code/MemberProfilePage.php
+++ b/code/MemberProfilePage.php
@@ -744,6 +744,8 @@ class MemberProfilePage_Controller extends Page_Controller {
$member->ValidationKey = null;
$member->write();
+ $this->extend('onConfirm', $member);
+
$member->logIn();
return array ( | Added an extension hook for email confirmation | symbiote_silverstripe-memberprofiles | train | php |
dc1bd03377e978937d3feb04a937254ba3e11f1b | diff --git a/spec/watirspec/div_spec.rb b/spec/watirspec/div_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/watirspec/div_spec.rb
+++ b/spec/watirspec/div_spec.rb
@@ -89,13 +89,13 @@ describe "Div" do
end
describe "#style" do
- not_compliant_on [:webdriver, :ie] do
+ not_compliant_on :ie do
it "returns the style attribute if the element exists" do
browser.div(:id, 'best_language').style.should == "color: red; text-decoration: underline; cursor: pointer;"
end
end
- deviates_on [:webdriver, :ie] do
+ deviates_on :ie do
it "returns the style attribute if the element exists" do
browser.div(:id, 'best_language').style.should == "COLOR: red; CURSOR: pointer; TEXT-DECORATION: underline"
end
@@ -191,7 +191,7 @@ describe "Div" do
end
describe "#html" do
- not_compliant_on [:webdriver, :ie] do
+ not_compliant_on :ie do
it "returns the HTML of the element" do
html = browser.div(:id, 'footer').html.downcase
html.should include('id="footer"')
@@ -203,7 +203,7 @@ describe "Div" do
end
end
- deviates_on [:webdriver, :ie] do
+ deviates_on :ie do
it "returns the HTML of the element" do
html = browser.div(:id, 'footer').html.downcase
html.should include('id=footer') | modify guards for Watir | watir_watir | train | rb |
88be4bfbef4c3a803de75f5377f62451f945e6e1 | diff --git a/benchexec/tools/consequence.py b/benchexec/tools/consequence.py
index <HASH>..<HASH> 100644
--- a/benchexec/tools/consequence.py
+++ b/benchexec/tools/consequence.py
@@ -21,15 +21,21 @@ import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
+REQUIRED_PATHS = [
+ "bin",
+ "check.sh",
+ "ConSequence",
+ "consequence.pl",
+ "deps",
+ "jars",
+ "setup_consequence.pl",
+ ]
+
class Tool(benchexec.tools.template.BaseTool):
"""
ConSequence
"""
- REQUIRED_PATHS = [
- "consequence.pl"
- ]
-
def executable(self):
return util.find_executable('consequence.pl') | Fix required paths for ConSequence | sosy-lab_benchexec | train | py |
195b837a21b845a05107971bd65665878f195e22 | diff --git a/ommprotocol/utils.py b/ommprotocol/utils.py
index <HASH>..<HASH> 100644
--- a/ommprotocol/utils.py
+++ b/ommprotocol/utils.py
@@ -76,7 +76,7 @@ def timed_input(prompt, timeout=300.0):
astring = None
try:
timer.start()
- astring = raw_input(prompt)
+ astring = raw_input()
except KeyboardInterrupt:
pass
timer.cancel() | fix: user prompt on emergency recovery was printed twice | insilichem_ommprotocol | train | py |
d9172d8f3dc16173cf2aef400712f8bd2663f2d2 | diff --git a/patch-op.js b/patch-op.js
index <HASH>..<HASH> 100644
--- a/patch-op.js
+++ b/patch-op.js
@@ -140,7 +140,7 @@ function reorderChildren(domNode, bIndex) {
}
node = children[move]
- insertNode = childNodes[i + insertOffset]
+ insertNode = childNodes[i + insertOffset] || null
if (node !== insertNode) {
domNode.insertBefore(node, insertNode)
} | ensure insertBefore is passed null not undefined | Matt-Esch_vdom | train | js |
cf2b144d93f79568c6b4e31cc77c569f0820f193 | diff --git a/tests/testEngine.js b/tests/testEngine.js
index <HASH>..<HASH> 100644
--- a/tests/testEngine.js
+++ b/tests/testEngine.js
@@ -9,6 +9,11 @@ testEngine.command('delay', (_, {send}) => {
setTimeout(() => send('ok'), 500)
})
+testEngine.command('play', (command, {send, err}) => {
+ if (command.args.length === 0) return err('player not specified')
+ send('playing for ' + command.args[0])
+})
+
testEngine.command('multiline', (_, {write, end}) => {
setTimeout(() => write('multi\n'), 500)
setTimeout(() => (write('line'), end()), 1000) | Add command to show how to get arguments | SabakiHQ_gtp | train | js |
b400a236c72f38e98f436733d5340a52a065a034 | diff --git a/lib/jinjs_prev.js b/lib/jinjs_prev.js
index <HASH>..<HASH> 100644
--- a/lib/jinjs_prev.js
+++ b/lib/jinjs_prev.js
@@ -122,7 +122,7 @@ function make_expr (str, ctx) {
if (str.charAt (0) == ".")
return str;
for (var i = 0; i < ctx.length; i++) {
- if (str.match (new RegExp ("^" + ctx[i] + "([^\\w]|$)")) ) {
+ if (str.match (new RegExp ("^\s*" + ctx[i] + "([^\\w]|$)")) ) {
return str;
}
} | Fixed context issues in expression that resolved variables wrongly. | ravelsoft_node-jinjs | train | js |
e5a987a4caab7bae261018a39d6c693c03da5cd2 | diff --git a/lib/sudo.rb b/lib/sudo.rb
index <HASH>..<HASH> 100644
--- a/lib/sudo.rb
+++ b/lib/sudo.rb
@@ -26,7 +26,7 @@ module Sudo
def initialize(ruby_opts='')
@proxy = nil
- @socket = "/tmp/rubysu-#{rand(100000)}"
+ @socket = "/tmp/rubysu-#{Process.pid}-#{object_id}"
server_uri = "drbunix:#{@socket}"
# just to check if we can sudo; and we'll receive a sudo token
@@ -35,6 +35,13 @@ module Sudo
@server_pid = spawn(
"sudo ruby -I#{LIBDIR} #{ruby_opts} #{SERVER_SCRIPT} #{@socket} #{Process.uid}"
)
+ at_exit do
+ if @server_pid
+ system "sudo kill #{@server_pid}" or
+ system "sudo kill -9 #{@server_pid}"
+ end
+ end
+
if wait_for(:timeout => 1){File.exists? @socket}
@proxy = DRbObject.new_with_uri(server_uri)
if block_given? | kill spawned process at_exit | gderosa_rubysu | train | rb |
e80291dba382465ec7731ee152db4de99619159f | diff --git a/angr/analyses/cfg_base.py b/angr/analyses/cfg_base.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/cfg_base.py
+++ b/angr/analyses/cfg_base.py
@@ -826,17 +826,17 @@ class CFGBase(Analysis):
# if there are multiple jump out sites and we have determined the "returning status" from one of
# the jump out sites, we can exit the loop early
continue
-
- if not jump_out_site.successors():
+ jump_out_site_successors = jump_out_site.successors()
+ if not jump_out_site_successors:
# not sure where it jumps to. bail out
bail_out = True
continue
- jump_out_target = jump_out_site.successors()[0]
- target_func = self.kb.functions.get(jump_out_target.addr, None)
- if target_func is None:
+ jump_out_target = jump_out_site_successors[0]
+ if not self.kb.functions.contains_addr(jump_out_target.addr):
# wait it does not jump to a function?
bail_out = True
continue
+ target_func = self.kb.functions[jump_out_target.addr]
if target_func.returning is True:
func.returning = True
bail_out = True | CFGBase: micro-optimization to speed-up function analysis | angr_angr | train | py |
386f8910017200f204f4963ebb40ca6d526b74ff | diff --git a/includes/models/class.player.php b/includes/models/class.player.php
index <HASH>..<HASH> 100644
--- a/includes/models/class.player.php
+++ b/includes/models/class.player.php
@@ -15,26 +15,10 @@ class player extends stat_object {
*/
protected $_steamid;
/**
- * @var int
- */
- protected $_communityvisibilitystate;
- /**
- * @var int
- */
- protected $_profilestate;
- /**
* @var string
*/
protected $_personaname;
/**
- * @var timestamp
- */
- protected $_lastlogoff;
- /**
- * @var int
- */
- protected $_commentpermission;
- /**
* @var string
*/
protected $_profileurl;
@@ -42,30 +26,6 @@ class player extends stat_object {
* @var string
*/
protected $_avatar;
- /**
- * @var string
- */
- protected $_avatarmedium;
- /**
- * @var string
- */
- protected $_avatarfull;
- /**
- * @var int
- */
- protected $_personastate;
- /**
- * @var string
- */
- protected $_realname;
- /**
- * @var string
- */
- protected $_primaryclanid;
- /**
- * @var timestamp
- */
- protected $_timecreated;
/**
* | #<I> Cannot update with player_mapper_db | kronusme_dota2-api | train | php |
f36c218b13115950df8f25a7481796f302ee1e24 | diff --git a/fs.go b/fs.go
index <HASH>..<HASH> 100644
--- a/fs.go
+++ b/fs.go
@@ -77,6 +77,9 @@ var (
// Path rewriter is used in FS for translating the current request
// to the local filesystem path relative to FS.Root.
//
+// The returned path must not contain '/../' substrings due to security reasons,
+// since such paths may refer files outside FS.Root.
+//
// The returned path may refer to ctx members. For example, ctx.Path().
type PathRewriteFunc func(ctx *RequestCtx) []byte
@@ -592,6 +595,11 @@ func (h *fsHandler) handleRequest(ctx *RequestCtx) {
ctx.Error("Are you a hacker?", StatusBadRequest)
return
}
+ if n := bytes.Index(path, strSlashDotDotSlash); n >= 0 {
+ ctx.Logger().Printf("cannot serve path with '/../' at position %d due to security reasons: %q", n, path)
+ ctx.Error("Internal Server Error", StatusInternalServerError)
+ return
+ }
mustCompress := false
fileCache := h.cache | FS: make sure that the path returned from PathRewriteFunc doesn't contain '/../' due to security reasons | valyala_fasthttp | train | go |
d5517d4d8939dc256fb9cdc256b6590673c23db6 | diff --git a/lib/topsy/version.rb b/lib/topsy/version.rb
index <HASH>..<HASH> 100644
--- a/lib/topsy/version.rb
+++ b/lib/topsy/version.rb
@@ -1,3 +1,3 @@
module Topsy
- VERSION = '0.3.2'
+ VERSION = '0.3.3'
end
\ No newline at end of file | Bumped version to <I> | pengwynn_topsy | train | rb |
fd1f357d1373bde8fd6cfc1c33bb1e3cb51c9fbd | diff --git a/vendor/refinerycms/inquiries/config/routes.rb b/vendor/refinerycms/inquiries/config/routes.rb
index <HASH>..<HASH> 100644
--- a/vendor/refinerycms/inquiries/config/routes.rb
+++ b/vendor/refinerycms/inquiries/config/routes.rb
@@ -1,7 +1,6 @@
Refinery::Application.routes.draw do
- match '/contact', :to => 'inquiries#new', :as => 'new_inquiry'
- match '/contact/thank_you', :to => 'inquiries#thank_you', :as => 'thank_you_inquiries'
- resources :inquiries do
+ get '/contact', :to => 'inquiries#new', :as => 'new_inquiry'
+ resources :contact, :as => :inquiries, :controller => 'inquiries' do
collection do
get :thank_you
end | Use the routes better, no more /inquiries when you submit the form (ht: gidogeek) | refinery_refinerycms | train | rb |
a4f0f8cfed521ec047575322fa48bfac511c669d | diff --git a/pyemma/coordinates/io/featurizer.py b/pyemma/coordinates/io/featurizer.py
index <HASH>..<HASH> 100644
--- a/pyemma/coordinates/io/featurizer.py
+++ b/pyemma/coordinates/io/featurizer.py
@@ -698,9 +698,11 @@ class MDFeaturizer(object):
"""
# if there are no features selected, return given trajectory
if len(self.active_features) == 0:
- warnings.warn(
- "You have no features selected. Returning plain coordinates.")
- return traj.xyz
+ warnings.warn("You have no features selected."
+ " Returning plain coordinates.")
+ s = traj.xyz.shape
+ new_shape = (s[0], s[1] * s[2])
+ return traj.xyz.reshape(new_shape)
# TODO: define preprocessing step (RMSD etc.) | [featurizer] flatten xyz array to 2d, if no features are selected. | markovmodel_PyEMMA | train | py |
191fe034ed468bda7fc5fdb46bf9db717c87c837 | diff --git a/yotta/lib/target.py b/yotta/lib/target.py
index <HASH>..<HASH> 100644
--- a/yotta/lib/target.py
+++ b/yotta/lib/target.py
@@ -296,7 +296,6 @@ class Target(pack.Pack):
try:
prog_path = os.path.join(builddir, program)
- signal.signal(signal.SIGINT, _ignoreSignal);
cmd = [
os.path.expandvars(string.Template(x).safe_substitute(program=prog_path))
for x in self.description['scripts']['test']
@@ -312,7 +311,5 @@ class Target(pack.Pack):
finally:
if child is not None:
child.terminate()
- # clear the sigint handler
- signal.signal(signal.SIGINT, signal.SIG_DFL);
return | don't ignore ctrl+c while running the test command | ARMmbed_yotta | train | py |
920da7d175398b03598fa74e9c1a465beb8ac736 | diff --git a/oa-basic/lib/omniauth/strategies/http_basic.rb b/oa-basic/lib/omniauth/strategies/http_basic.rb
index <HASH>..<HASH> 100644
--- a/oa-basic/lib/omniauth/strategies/http_basic.rb
+++ b/oa-basic/lib/omniauth/strategies/http_basic.rb
@@ -1,4 +1,4 @@
-require 'restclient'
+require 'rest-client'
require 'omniauth/basic'
module OmniAuth | changed 'restclient' to 'rest-client' to match that library's new require style /HT achiu | omniauth_omniauth | train | rb |
075a352057e7906caf7d0bc5213079268883cab1 | diff --git a/src/Generator/Analyzer/Steps/AbstractProcessStep.php b/src/Generator/Analyzer/Steps/AbstractProcessStep.php
index <HASH>..<HASH> 100644
--- a/src/Generator/Analyzer/Steps/AbstractProcessStep.php
+++ b/src/Generator/Analyzer/Steps/AbstractProcessStep.php
@@ -37,6 +37,10 @@ abstract class AbstractProcessStep extends CzimAbstractProcessStep
*/
protected function fieldNameToDatabaseColumn($field)
{
+ // the PXL CMS Generator is very forgiving when using multiple spaces,
+ // so we need to filter them out here
+ $field = preg_replace('#\s+#', ' ', $field);
+
return str_replace(' ', '_', trim(strtolower($field)));
} | fixed bug where CMS field names have double spaces | czim_laravel-pxlcms | train | php |
97594cde61726496306b9773ff5fa7debd472cca | diff --git a/src/Kernel/AccessToken.php b/src/Kernel/AccessToken.php
index <HASH>..<HASH> 100644
--- a/src/Kernel/AccessToken.php
+++ b/src/Kernel/AccessToken.php
@@ -197,7 +197,7 @@ abstract class AccessToken implements AccessTokenInterface
}
/**
- * @return mixed|string
+ * @return string
*
* @throws \EasyWeChat\Kernel\Exceptions\InvalidArgumentException
*/
diff --git a/src/Kernel/BaseClient.php b/src/Kernel/BaseClient.php
index <HASH>..<HASH> 100644
--- a/src/Kernel/BaseClient.php
+++ b/src/Kernel/BaseClient.php
@@ -150,7 +150,7 @@ class BaseClient
* @param array $options
* @param bool $returnRaw
*
- * @return \Psr\Http\Message\ResponseInterface|\EasyWeChat\Kernel\Support\Collection|array|object|string
+ * @return \GuzzleHttp\Psr7\Response
*/
public function request(string $url, string $method = 'GET', array $options = [], $returnRaw = false)
{ | Scrutinizer Auto-Fixes (#<I>)
This commit consists of patches automatically generated for this project on <URL> | overtrue_wechat | train | php,php |
f7bea27de3d3f374003bf262e8f847be16703fb3 | diff --git a/src/main/java/stormpot/whirlpool/Request.java b/src/main/java/stormpot/whirlpool/Request.java
index <HASH>..<HASH> 100644
--- a/src/main/java/stormpot/whirlpool/Request.java
+++ b/src/main/java/stormpot/whirlpool/Request.java
@@ -1,6 +1,6 @@
package stormpot.whirlpool;
-public class Request {
+class Request {
private static ThreadLocal<Request> requestRef = new ThreadLocal<Request>();
public static Request get() {
@@ -16,4 +16,8 @@ public class Request {
return true;
}
+ static void clear() {
+ requestRef.set(null);
+ }
+
}
diff --git a/src/test/java/stormpot/whirlpool/RequestTest.java b/src/test/java/stormpot/whirlpool/RequestTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/stormpot/whirlpool/RequestTest.java
+++ b/src/test/java/stormpot/whirlpool/RequestTest.java
@@ -43,6 +43,13 @@ public class RequestTest {
getMustReturnActiveRequest() {
assertTrue(Request.get().active());
}
- // TODO get must return active request
+
+ @Test public void
+ clearMustResetAssignedRequest() {
+ Request r1 = Request.get();
+ Request.clear();
+ Request r2 = Request.get();
+ assertTrue(r1 != r2);
+ }
// TODO get must allocate new request if existing is inactive
} | not sure what I need Request.clear for, but now it is there. | chrisvest_stormpot | train | java,java |
daa801b9ff8c81e6812a08a3f6ef82f593248e9d | diff --git a/lib/puppet/application/agent.rb b/lib/puppet/application/agent.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/application/agent.rb
+++ b/lib/puppet/application/agent.rb
@@ -219,6 +219,10 @@ class Puppet::Application::Agent < Puppet::Application
Puppet.settings.use :main, :agent, :ssl
+ # Always ignoreimport for agent. It really shouldn't even try to import,
+ # but this is just a temporary band-aid.
+ Puppet[:ignoreimport] = true
+
# We need to specify a ca location for all of the SSL-related i
# indirected classes to work; in fingerprint mode we just need
# access to the local files and we don't need a ca. | [#<I>] Temporary fix to stop agent from importing modules
Due to type collection madness, agent tries to import modules
to resolve resource types. That is wrong, decreases performance,
and causes problems. This patch forces agent to not import any
files by setting ignoreimport to true. | puppetlabs_puppet | train | rb |
34345affbae8530478d90e654c4a00e2e1f2a8d6 | diff --git a/pyes/es.py b/pyes/es.py
index <HASH>..<HASH> 100644
--- a/pyes/es.py
+++ b/pyes/es.py
@@ -368,7 +368,8 @@ class ES(object):
If `indices` is not supplied, returns the default_indices.
"""
- indices = indices or self.default_indices
+ if indices is None:
+ return self.default_indices
if isinstance(indices, basestring):
indices = [indices]
return indices
@@ -384,6 +385,17 @@ class ES(object):
curl_cmd += " -d '%s'" % request.body
print >> self.dump_curl, curl_cmd
+ def _get_default_indices(self):
+ return self._default_indices
+
+ def _set_default_indices(self, default_indices):
+ if default_indices is not None:
+ default_indices = self._validate_indices(default_indices)
+ self._default_indices = default_indices
+
+ default_indices = property(_get_default_indices, _set_default_indices)
+ del _get_default_indices, _set_default_indices
+
#---- Admin commands
def status(self, indices=None):
""" | Changed ES.default_indices to a property that calls _validate_indices() on set | aparo_pyes | train | py |
4491f993239274c32ff99249cc71d888e91a2138 | diff --git a/src/Neevo/Manager.php b/src/Neevo/Manager.php
index <HASH>..<HASH> 100644
--- a/src/Neevo/Manager.php
+++ b/src/Neevo/Manager.php
@@ -172,6 +172,9 @@ class Manager implements IObservable, IObserver {
$count++;
}
}
+ if($sql !== ''){
+ $this->connection->getDriver()->runQuery($sql);
+ }
fclose($handle);
ignore_user_abort($abort);
return $count; | fix for loadFile() if not ending with semicolon | smasty_Neevo | train | php |
126f09111354febce5f6608928326b07a2ae4f33 | diff --git a/src/main/java/net/openhft/chronicle/hash/impl/util/CanonicalRandomAccessFiles.java b/src/main/java/net/openhft/chronicle/hash/impl/util/CanonicalRandomAccessFiles.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/openhft/chronicle/hash/impl/util/CanonicalRandomAccessFiles.java
+++ b/src/main/java/net/openhft/chronicle/hash/impl/util/CanonicalRandomAccessFiles.java
@@ -71,4 +71,6 @@ public final class CanonicalRandomAccessFiles {
}
});
}
+
+ private CanonicalRandomAccessFiles() {}
} | Add missing private constructor to CanonicalRandomAccessFiles to prohibit instantiation of this class | OpenHFT_Chronicle-Map | train | java |
b4ce3c7fae13b2f8cf378f4b3e4608528858f8c7 | diff --git a/glances/core/glances_autodiscover.py b/glances/core/glances_autodiscover.py
index <HASH>..<HASH> 100644
--- a/glances/core/glances_autodiscover.py
+++ b/glances/core/glances_autodiscover.py
@@ -204,6 +204,10 @@ class GlancesAutoDiscoverClient(object):
else:
logger.error("Couldn't find the active IP address: netifaces library not found.")
+ # Correct issue #528 (no network interface available)
+ if zeroconf_bind_address is None:
+ zeroconf_bind_address == '0.0.0.0'
+
logger.info("Announce the Glances server on the LAN (using {0} IP address)".format(zeroconf_bind_address))
print("Announce the Glances server on the LAN (using {0} IP address)".format(zeroconf_bind_address)) | Correct server mode issue when no network interface is available (issue #<I>) | nicolargo_glances | train | py |
0328485c9f9d652c560c8500f3a5e09676d38918 | diff --git a/p2p/security/tls/transport.go b/p2p/security/tls/transport.go
index <HASH>..<HASH> 100644
--- a/p2p/security/tls/transport.go
+++ b/p2p/security/tls/transport.go
@@ -6,6 +6,7 @@ import (
"errors"
"net"
"os"
+ "sync"
ci "github.com/libp2p/go-libp2p-core/crypto"
"github.com/libp2p/go-libp2p-core/peer"
@@ -81,9 +82,19 @@ func (t *Transport) handshake(
tlsConn.Close()
default:
}
+
done := make(chan struct{})
+ var wg sync.WaitGroup
+
+ // Ensure that we do not return before
+ // either being done or having a context
+ // cancellation.
+ defer wg.Wait()
defer close(done)
+
+ wg.Add(1)
go func() {
+ defer wg.Done()
select {
case <-done:
case <-ctx.Done(): | Fix: Connection Closed after handshake
The context-cancelled watchdog goroutine may start running way after the
handshake has finished and the associated context has been cancelled (by the
executeDial() function in go-libp2p-swarm usuaully).
This results in the connection being closed right after being stablished. | libp2p_go-libp2p | train | go |
d2948c47394681e1345225175f0a295acb486b63 | diff --git a/libfs/file.go b/libfs/file.go
index <HASH>..<HASH> 100644
--- a/libfs/file.go
+++ b/libfs/file.go
@@ -218,11 +218,26 @@ func (f *File) Unlock() (err error) {
return f.fs.config.MDServer().ReleaseLock(f.fs.ctx,
f.fs.root.GetFolderBranch().Tlf, f.getLockID())
}
- return jServer.FinishSingleOp(f.fs.ctx,
+ err = jServer.FinishSingleOp(f.fs.ctx,
f.fs.root.GetFolderBranch().Tlf, &keybase1.LockContext{
RequireLockID: f.getLockID(),
ReleaseAfterSuccess: true,
}, f.fs.priority)
+ if err != nil {
+ return err
+ }
+
+ if f.fs.config.Mode() != libkbfs.InitSingleOp {
+ f.fs.log.CDebugf(f.fs.ctx, "Releasing the lock")
+
+ // Need to explicitly release the lock from the server.
+ err = f.fs.config.MDServer().ReleaseLock(
+ f.fs.ctx, f.fs.root.GetFolderBranch().Tlf, f.getLockID())
+ if err != nil {
+ return err
+ }
+ }
+ return nil
}
// Truncate implements the billy.File interface for File. | libfs: release lock explicitly if not in single op mode.
Since libgit.Autogit.Clone() can be run in non-single-op mode, and it
takes a lock, we need to have `File.Unlock()` explicitly release the
lock after flushing the journal.
Issue: KBFS-<I> | keybase_client | train | go |
fbea1ece2e9354deab89e2f05f39e0efec4c0336 | diff --git a/python/ray/actor.py b/python/ray/actor.py
index <HASH>..<HASH> 100644
--- a/python/ray/actor.py
+++ b/python/ray/actor.py
@@ -598,6 +598,9 @@ class ActorHandle(object):
# The last object returned is the dummy object that should be
# passed in to the next actor method. Do not return it to the user.
self._ray_actor_cursor = object_ids.pop()
+ # We have notified the backend of the new actor handles to expect
+ # since the last task was submitted, so clear the list.
+ self._ray_new_actor_handles = []
if len(object_ids) == 1:
object_ids = object_ids[0] | Clear new actor handle list after submitting task. (#<I>) | ray-project_ray | train | py |
c28ba9a2ee3235b81017798c8c4748e9335488b3 | diff --git a/lib/selectors/source-map-stringifier.js b/lib/selectors/source-map-stringifier.js
index <HASH>..<HASH> 100644
--- a/lib/selectors/source-map-stringifier.js
+++ b/lib/selectors/source-map-stringifier.js
@@ -10,7 +10,6 @@ function Rebuilder(options, restoreCallback, inputMapTracker) {
this.line = 1;
this.output = [];
this.keepBreaks = options.keepBreaks;
- this.relativeTo = options.relativeTo;
this.restore = restoreCallback;
this.inputMapTracker = inputMapTracker;
this.outputMap = new SourceMapGenerator();
@@ -32,9 +31,7 @@ Rebuilder.prototype.relativePathResolver = function (sourcePath, sourceRelativeT
if (sourceRelativeTo)
sourcePath = path.resolve(path.dirname(sourceRelativeTo), sourcePath);
- return path.normalize(sourcePath) === path.resolve(sourcePath) ?
- path.relative(this.rebaseTo, sourcePath) :
- path.relative(this.rebaseTo, path.join(this.relativeTo, sourcePath));
+ return path.relative(this.rebaseTo, sourcePath);
};
Rebuilder.prototype.rebuildValue = function (list, separator) { | Simplifies resolving paths in source map stringifier. | jakubpawlowicz_clean-css | train | js |
ee01a5f66ffba7115939b72ac70530f387e51524 | diff --git a/cmd/influxd/server_integration_test.go b/cmd/influxd/server_integration_test.go
index <HASH>..<HASH> 100644
--- a/cmd/influxd/server_integration_test.go
+++ b/cmd/influxd/server_integration_test.go
@@ -581,6 +581,16 @@ func runTestsData(t *testing.T, testName string, nodes Cluster, database, retent
// Aggregations
{
reset: true,
+ name: "stddev with just one point",
+ write: `{"database" : "%DB%", "retentionPolicy" : "%RP%", "points": [
+ {"name": "cpu", "timestamp": "2015-04-20T14:27:41Z", "fields": {"value": 45}}
+ ]}`,
+ query: `SELECT stddev(value) FROM cpu`,
+ queryDb: "%DB%",
+ expected: `{"results":[{"series":[{"name":"cpu","columns":["time","stddev"],"values":[["1970-01-01T00:00:00Z",null]]}]}]}`,
+ },
+ {
+ reset: true,
name: "large mean and stddev",
write: `{"database" : "%DB%", "retentionPolicy" : "%RP%", "points": [
{"name": "cpu", "timestamp": "2015-04-20T14:27:40Z", "fields": {"value": ` + string(maxFloat64) + `}}, | add test for stddev on 1 point | influxdata_influxdb | train | go |
b80c264e6be52370a82f1707051617940b9ff939 | diff --git a/py3status/modules/kdeconnector.py b/py3status/modules/kdeconnector.py
index <HASH>..<HASH> 100644
--- a/py3status/modules/kdeconnector.py
+++ b/py3status/modules/kdeconnector.py
@@ -82,12 +82,12 @@ class Py3status:
self.device_id = self._get_device_id(_bus)
if self.device_id is None:
return False
- else:
- try:
- self._dev = _bus.get(SERVICE_BUS,
- DEVICE_PATH + '/%s' % self.device_id)
- except Exception:
- return False
+
+ try:
+ self._dev = _bus.get(SERVICE_BUS,
+ DEVICE_PATH + '/%s' % self.device_id)
+ except Exception:
+ return False
return True
@@ -102,7 +102,6 @@ class Py3status:
return devices[0]
for id in devices:
- self._dev = bus.get(SERVICE_BUS, DEVICE_PATH + '/%s' % id)
if self.device == self._dev.name:
return id | Fix a bug which occurred when no device id or device name was given | ultrabug_py3status | train | py |
ff07b23a869d2574a2e2049f41f1e1807609b3e0 | diff --git a/clients/python/pycellbase/cbconfig.py b/clients/python/pycellbase/cbconfig.py
index <HASH>..<HASH> 100755
--- a/clients/python/pycellbase/cbconfig.py
+++ b/clients/python/pycellbase/cbconfig.py
@@ -31,7 +31,7 @@ class ConfigClient(object):
config_dict = json.loads(config_fhand.read())
if config_dict is not None:
- if 'host' in config_dict['rest']:
+ if 'hosts' in config_dict['rest']:
self._hosts = config_dict['rest']['hosts']
self._config['host'] = self._get_available_host()
if 'version' in config_dict: | Fixed bug where config file info was not retrieved correctly | opencb_cellbase | train | py |
311cdbdcab8200d55186a42e9f598d52df18caba | diff --git a/terraform/eval_diff.go b/terraform/eval_diff.go
index <HASH>..<HASH> 100644
--- a/terraform/eval_diff.go
+++ b/terraform/eval_diff.go
@@ -775,33 +775,6 @@ func (n *EvalDiffDestroy) Eval(ctx EvalContext) (interface{}, error) {
return nil, nil
}
-// EvalDiffDestroyModule is an EvalNode implementation that writes the diff to
-// the full diff.
-type EvalDiffDestroyModule struct {
- Path addrs.ModuleInstance
-}
-
-// TODO: test
-func (n *EvalDiffDestroyModule) Eval(ctx EvalContext) (interface{}, error) {
- return nil, fmt.Errorf("EvalDiffDestroyModule not yet updated for new plan types")
- /*
- diff, lock := ctx.Diff()
-
- // Acquire the lock so that we can do this safely concurrently
- lock.Lock()
- defer lock.Unlock()
-
- // Write the diff
- modDiff := diff.ModuleByPath(n.Path)
- if modDiff == nil {
- modDiff = diff.AddModule(n.Path)
- }
- modDiff.Destroy = true
-
- return nil, nil
- */
-}
-
// EvalReduceDiff is an EvalNode implementation that takes a planned resource
// instance change as might be produced by EvalDiff or EvalDiffDestroy and
// "simplifies" it to a single atomic action to be performed by a specific | core: Remove unused EvalDiffDestroyModule
This is no longer needed because the state structure self-prunes when
a module becomes empty. | hashicorp_terraform | train | go |
ef12ba00eed90e4d5ae3072a5a03f44a4eee78c3 | diff --git a/tests/junit/org/jgroups/tests/FlushCloseOpenTest.java b/tests/junit/org/jgroups/tests/FlushCloseOpenTest.java
index <HASH>..<HASH> 100644
--- a/tests/junit/org/jgroups/tests/FlushCloseOpenTest.java
+++ b/tests/junit/org/jgroups/tests/FlushCloseOpenTest.java
@@ -42,7 +42,7 @@ public class FlushCloseOpenTest extends ChannelTestBase {
channel.stopFlush();
channel.close();
- channel = createChannel((JChannel) channel);
+ channel = createChannel((JChannel) channel2);
channel.setReceiver(receiver);
channel.connect("testClust");
sendMessage(channel2, "msg3");
@@ -50,7 +50,9 @@ public class FlushCloseOpenTest extends ChannelTestBase {
channel2.startFlush(false);
assertCount(receiver, 3, receiver2, 3);
channel.stopFlush();
- channel2.disconnect();
+ channel2.close();
+ channel2 = createChannel((JChannel) channel);
+ channel2.setReceiver(receiver2);
channel2.connect("testClust");
sendMessage(channel2, "msg4"); | test rewrite that uses createChannel properly | belaban_JGroups | train | java |
920566c9b4b095d01fb538f739b184bad5e53de7 | diff --git a/demos/plane/plane.js b/demos/plane/plane.js
index <HASH>..<HASH> 100644
--- a/demos/plane/plane.js
+++ b/demos/plane/plane.js
@@ -65,7 +65,7 @@ Plane.LANGUAGE_NAME = {
'tr': 'Türkçe',
'uk': 'Українська',
'vi': 'Tiếng Việt',
- 'zh-hans': '簡體中文',
+ 'zh-hans': '简体中文',
'zh-hant': '正體中文'
}; | fixed an translation error
change "簡體中文" to "简体中文" | LLK_scratch-blocks | train | js |
0781b005590a3becaa9e12499e44b7b37420e450 | diff --git a/tests/climodule/cli/BasicOperationsCest.php b/tests/climodule/cli/BasicOperationsCest.php
index <HASH>..<HASH> 100644
--- a/tests/climodule/cli/BasicOperationsCest.php
+++ b/tests/climodule/cli/BasicOperationsCest.php
@@ -22,6 +22,7 @@ class BasicOperationsCest
public function it_should_allow_creating_a_post_in_the_word_press_installation(ClimoduleTester $I)
{
$I->cli('post create --post_title="Some Post" --post_type=post');
+ $I->cli('post list --post_type=post');
$I->seePostInDatabase(['post_title' => 'Some Post', 'post_type' => 'post']);
} | tests(climodule) list posts in post creation test | lucatume_wp-browser | train | php |
fed3f5ccc390595afa24dfa5e0dd113c030dea60 | diff --git a/autolens/pipeline/tagging.py b/autolens/pipeline/tagging.py
index <HASH>..<HASH> 100644
--- a/autolens/pipeline/tagging.py
+++ b/autolens/pipeline/tagging.py
@@ -27,6 +27,9 @@ def pipeline_tag_from_pipeline_settings(
align_bulge_disk_axis_ratio=align_bulge_disk_axis_ratio,
align_bulge_disk_phi=align_bulge_disk_phi)
+ pixelization_tag = ''
+ regularization_tag = ''
+
return fix_lens_light_tag + pixelization_tag + regularization_tag + bulge_disk_tag
def fix_lens_light_tag_from_fix_lens_light(fix_lens_light): | Removing tags of pixelization in dummy release for Amy | Jammy2211_PyAutoLens | train | py |
0d36ebbdf5b742501df4fde3a73bba83f9929a7a | diff --git a/koala/Range.py b/koala/Range.py
index <HASH>..<HASH> 100644
--- a/koala/Range.py
+++ b/koala/Range.py
@@ -543,14 +543,20 @@ class RangeCore(dict):
@staticmethod
def is_superior_or_equal(a, b):
try:
- return check_value(a) >= check_value(b)
+ a = check_value(a)
+ b = check_value(b)
+
+ return a > b or is_almost_equal(a, b)
except Exception as e:
return ExcelError('#N/A', e)
@staticmethod
def is_inferior_or_equal(a, b):
try:
- return check_value(a) <= check_value(b)
+ a = check_value(a)
+ b = check_value(b)
+
+ return a < b or is_almost_equal(a, b)
except Exception as e:
return ExcelError('#N/A', e) | Range.>= and Range.<= use is_almost_equal() | anthill_koala | train | py |
611513acb2b3326d13b2997a7b476ac91b9d92af | diff --git a/pyout.py b/pyout.py
index <HASH>..<HASH> 100644
--- a/pyout.py
+++ b/pyout.py
@@ -287,6 +287,13 @@ def _adopt(style, new_style):
return {key: dict(style[key], **new_style.get(key, {})) for key in style}
+def _safe_get(mapping, key, default=None):
+ try:
+ return mapping.get(key, default)
+ except AttributeError:
+ return default
+
+
class Tabular(object):
"""Interface for writing and updating styled terminal output. | Add a helper for swallowing *.get attribute errors
This will be used when dealing with style keys that can either be a
dictionary or a simple type like a string or number. | pyout_pyout | train | py |
b4e25143dcbe6f17810d2bbd7e2f0fd00de48cb2 | diff --git a/lib/vagrant/util.rb b/lib/vagrant/util.rb
index <HASH>..<HASH> 100644
--- a/lib/vagrant/util.rb
+++ b/lib/vagrant/util.rb
@@ -3,16 +3,15 @@ module Vagrant
def self.included(base)
base.extend Vagrant::Util
end
-
+
def error_and_exit(error)
- puts <<-error
+ abort <<-error
=====================================================================
Vagrant experienced an error!
#{error.chomp}
=====================================================================
error
- exit
end
def logger | error_and_exit now uses `abort`, which prints to stdout before exiting | hashicorp_vagrant | train | rb |
3625bf6400a7b2cffed7a7ad9d59ae4d98cc6089 | diff --git a/lib/how_is/builds.rb b/lib/how_is/builds.rb
index <HASH>..<HASH> 100644
--- a/lib/how_is/builds.rb
+++ b/lib/how_is/builds.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require "how_is/fetcher"
+require "tessellator/fetcher"
class HowIs
# Fetches metadata about CI builds. | how the everloving fuck did this work before? | duckinator_inq | train | rb |
12b05783084d79621efd4f0b0314ced24889950d | diff --git a/windpowerlib/wind_turbine.py b/windpowerlib/wind_turbine.py
index <HASH>..<HASH> 100644
--- a/windpowerlib/wind_turbine.py
+++ b/windpowerlib/wind_turbine.py
@@ -163,12 +163,12 @@ class WindTurbine(object):
data = np.delete(data, 0, 0)
df = pd.DataFrame(data, columns=['v_wind', self.fetch_curve])
df.set_index('v_wind', drop=True, inplace=True)
- nominal_power = wpp_df['p_nom'].iloc[0]
+ nominal_power = wpp_df['p_nom'].iloc[0] * 1000.0
return df, nominal_power
if self.fetch_curve == 'p':
filename = 'p_curves.csv'
p_values, p_nom = restructure_data()
- self.p_values = p_values
+ self.p_values = p_values * 1000.0
else:
filename = 'cp_curves.csv'
self.cp_values, p_nom = restructure_data() | Convert nominal power and p_values in fetch_turbine_data to watt | wind-python_windpowerlib | train | py |
6aa868f26a65db6e69399e83d83034590622fb25 | diff --git a/lib/elastomer/client/cluster.rb b/lib/elastomer/client/cluster.rb
index <HASH>..<HASH> 100644
--- a/lib/elastomer/client/cluster.rb
+++ b/lib/elastomer/client/cluster.rb
@@ -84,6 +84,15 @@ module Elastomer
response.body
end
+ # Returns `true` if there items in the pending task list. Returns `false`
+ # if the pending task list is empty. Returns `nil` if the response body
+ # does not contain the "tasks" field.
+ def pending_tasks?
+ hash = pending_tasks
+ return nil unless hash.key? "tasks"
+ hash["tasks"].length > 0
+ end
+
# Cluster wide settings that have been modified via the update API.
#
# params - Parameters Hash | adding a `pending_tasks?` quick check method | github_elastomer-client | train | rb |
a023de61a792630d47fe7f69cfa042d309a12abd | diff --git a/pkg/addons/addons.go b/pkg/addons/addons.go
index <HASH>..<HASH> 100644
--- a/pkg/addons/addons.go
+++ b/pkg/addons/addons.go
@@ -332,7 +332,9 @@ func Start(wg *sync.WaitGroup, cc *config.ClusterConfig, toEnable map[string]boo
var awg sync.WaitGroup
- out.T(out.AddonEnable, "Enabling addons: {{.addons}}", out.V{"addons": strings.Join(toEnableList, ", ")})
+ defer func() { // making it show after verifications
+ out.T(out.AddonEnable, "Enabling addons: {{.addons}}", out.V{"addons": strings.Join(toEnableList, ", ")})
+ }()
for _, a := range toEnableList {
awg.Add(1)
go func(name string) { | defer printing enabling addons | kubernetes_minikube | train | go |
6bc7a700471882f991bbee7912801cacc7e85d0c | diff --git a/src/Installer/AbstractModuleInstaller.php b/src/Installer/AbstractModuleInstaller.php
index <HASH>..<HASH> 100644
--- a/src/Installer/AbstractModuleInstaller.php
+++ b/src/Installer/AbstractModuleInstaller.php
@@ -452,7 +452,7 @@ abstract class AbstractModuleInstaller extends LibraryInstaller
if (file_exists($target)) {
// Target link already exists and is correct, do nothing
- if (is_link($target) && $source === readlink($target)) {
+ if (is_link($target) && $source === realpath($target)) {
return false;
} | Fixed decting correct symlinks | contao-community-alliance_composer-plugin | train | php |
a6b6cd2b50cc26e201523446ff76a01b02a42689 | diff --git a/backends/graphite.js b/backends/graphite.js
index <HASH>..<HASH> 100644
--- a/backends/graphite.js
+++ b/backends/graphite.js
@@ -316,7 +316,7 @@ exports.init = function graphite_init(startup_time, config, events, logger) {
}
graphiteStats.last_flush = startup_time;
- graphiteStats.last_exception = startup_time;
+ graphiteStats.last_exception = 0;
graphiteStats.flush_time = 0;
graphiteStats.flush_length = 0; | Set last_exception time to 0 on initialisation
When this was being set to the startup_time it was causing confusion
with users of the admin panel, leading people to believe there had
been an exception which wasn't being logged. Since no exceptions are
occuring, a 0 initialisation seems reasonable here. | statsd_statsd | train | js |
d12fecd2eb012862b8d7654c879dccf5ccce833f | diff --git a/jose/backends/__init__.py b/jose/backends/__init__.py
index <HASH>..<HASH> 100644
--- a/jose/backends/__init__.py
+++ b/jose/backends/__init__.py
@@ -2,7 +2,10 @@
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
- from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
+ try:
+ from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
+ except ImportError:
+ from jose.backends.rsa_backend import RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey | Enable Python RSA backend as a fallback. | mpdavis_python-jose | train | py |
9dcfcff548f2e101bb5de1c24f9a0629471d11e6 | diff --git a/build-tools/gulp-tasks/build-webpack/webpack.config.js b/build-tools/gulp-tasks/build-webpack/webpack.config.js
index <HASH>..<HASH> 100644
--- a/build-tools/gulp-tasks/build-webpack/webpack.config.js
+++ b/build-tools/gulp-tasks/build-webpack/webpack.config.js
@@ -33,8 +33,6 @@ const defaultConfig = {
// entry: './src/index.js',
entry: {
'critical-fonts': './src/_patterns/02-components/bolt-critical-fonts/src/critical-fonts',
-
- 'critical-fonts': './src/_patterns/02-components/bolt-button/src/button',
// './src/components/bolt-icon/dist/icon': [
// // './src/scripts/native-shim.js', //Wrapper for custom-elements-es5-adapter.js so this doesn't break in other browsers like IE11
// // './node_modules/@webcomponents/webcomponentsjs/webcomponents-lite.js', | fix: revert webpack config update -- no longer needed. | bolt-design-system_bolt | train | js |
b09e1c0f18a7845f5eb287d4768f021b0df9569f | diff --git a/carrot/backends/base.py b/carrot/backends/base.py
index <HASH>..<HASH> 100644
--- a/carrot/backends/base.py
+++ b/carrot/backends/base.py
@@ -93,6 +93,7 @@ class BaseMessage(object):
class BaseBackend(object):
"""Base class for backends."""
+ default_port = None
def __init__(self, connection, **kwargs):
self.connection = connection
@@ -128,6 +129,11 @@ class BaseBackend(object):
"""Acknowledge the message."""
pass
+ def queue_purge(self, queue, **kwargs):
+ """Discard all messages in the queue. This will delete the messages
+ and results in an empty queue."""
+ return 0
+
def reject(self, delivery_tag):
"""Reject the message."""
pass
@@ -159,3 +165,7 @@ class BaseBackend(object):
def establish_connection(self):
"""Establish a connection to the backend."""
pass
+
+ def close_connection(self, connection):
+ """Close the connection."""
+ pass | BaseBackend.queue_purge now always returns 0 + new method:
close_connection(connection) to let the backend handle closing a backend connection
object. | ask_carrot | train | py |
30df4089ae0af774a3e4cccab636e94ad7a86680 | diff --git a/documentcloud.py b/documentcloud.py
index <HASH>..<HASH> 100644
--- a/documentcloud.py
+++ b/documentcloud.py
@@ -63,6 +63,24 @@ class Document(BaseAPIObject):
response = urllib2.urlopen(req)
return response.read()
full_text = property(get_full_text)
+
+ def get_small_image_url(self, page=1):
+ template = self.resources.page.get('image')
+ url = template.replace("{page}", str(page)).replace("{size}", "small")
+ return url
+ small_image_url = property(get_small_image_url)
+
+ def get_thumbnail_image_url(self, page=1):
+ template = self.resources.page.get('image')
+ url = template.replace("{page}", str(page)).replace("{size}", "thumbnail")
+ return url
+ small_thumbnail_url = property(get_small_thumbnail_url)
+
+ def get_large_image_url(self, page=1):
+ template = self.resources.page.get('image')
+ url = template.replace("{page}", str(page)).replace("{size}", "large")
+ return url
+ small_large_url = property(get_small_large_url)
class Project(BaseAPIObject):
@@ -154,7 +172,7 @@ if __name__ == '__main__':
obj = document_list[0]
pprint(obj.__dict__)
pprint(obj.resources.__dict__)
- print obj.text
+ print obj.small_image_url | Added methods for pull the image urls by page, with 1 being the default | datadesk_python-documentcloud | train | py |
a54cb6d9eb65cf7f33df310ea373296fe7c29b5f | diff --git a/openxmllib/utils.py b/openxmllib/utils.py
index <HASH>..<HASH> 100644
--- a/openxmllib/utils.py
+++ b/openxmllib/utils.py
@@ -57,10 +57,10 @@ class IndexableTextExtractor(object):
def addTextElement(self, element_name):
"""Adding an element that may contanin text to index
@param element_name: an element that contains text to extract.
- the name may be prefixed with a key from namespaces.ns_map
+ the name may be prefixed with a key from namespaces.ns_map
"""
- self.text_elts_xpaths.append(etree.XPath('//' + element_name, ns_map))
+ self.text_elts_xpaths.append(etree.XPath('//' + element_name, namespaces=ns_map))
return | 'namespaces' is a named param of lxml.etree.XPath constructor. | glenfant_openxmllib | train | py |
80c349af7d4b08636d60d67c943e1c5b8bb39cc6 | diff --git a/src/Bridge/Symfony/Validator/Metadata/Property/ValidatorPropertyMetadataFactory.php b/src/Bridge/Symfony/Validator/Metadata/Property/ValidatorPropertyMetadataFactory.php
index <HASH>..<HASH> 100644
--- a/src/Bridge/Symfony/Validator/Metadata/Property/ValidatorPropertyMetadataFactory.php
+++ b/src/Bridge/Symfony/Validator/Metadata/Property/ValidatorPropertyMetadataFactory.php
@@ -108,9 +108,7 @@ final class ValidatorPropertyMetadataFactory implements PropertyMetadataFactoryI
}
}
- $propertyMetadata = $propertyMetadata->withIri($iri);
-
- return $propertyMetadata->withRequired($required ?? false);
+ return $propertyMetadata->withIri($iri)->withRequired($required ?? false);
}
/** | doc (external-vocabularies): guess IRI from validation constraints (code review) | api-platform_core | train | php |
2aa793cc2e20bc8485713b6d695d9e7a62202ea8 | diff --git a/lib/rbbt/workflow/util/provenance.rb b/lib/rbbt/workflow/util/provenance.rb
index <HASH>..<HASH> 100644
--- a/lib/rbbt/workflow/util/provenance.rb
+++ b/lib/rbbt/workflow/util/provenance.rb
@@ -78,6 +78,7 @@ class Step
name = info[:name] || File.basename(path)
status = :unsync if status == :done and not Open.exist?(path)
status = :notfound if status == :noinfo and not Open.exist?(path)
+
str = " " * offset
str << prov_report_msg(status, name, path, info)
step.dependencies.reverse.each do |dep|
@@ -90,7 +91,7 @@ class Step
if expand_repeats
str << Log.color(:green, Log.uncolor(prov_report(dep, offset+1, task)))
else
- str << Log.color(:green, " " * (offset + 1) + Log.uncolor(prov_report_msg(status, name, path, info)))
+ str << Log.color(:green, " " * (offset + 1) + Log.uncolor(prov_report_msg(dep.status, dep.info[:name], dep.path, dep.info)))
end
end
end if step.dependencies | Fix bug reporting wrong the status of repeated deps | mikisvaz_rbbt-util | train | rb |
cb03a35a3fa13e130d2ab0891eef3e30ed4aafcf | diff --git a/structr-ui/src/main/resources/structr/js/model.js b/structr-ui/src/main/resources/structr/js/model.js
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/resources/structr/js/model.js
+++ b/structr-ui/src/main/resources/structr/js/model.js
@@ -731,5 +731,7 @@ StructrContent.prototype.append = function(refNode) {
}
_Entities.setMouseOver(div);
+
+ StructrModel.expand(div, this);
}
\ No newline at end of file | expand node if content node is appended | structr_structr | train | js |
00690d57d27aa14053cc9a693c5cf3acb8574136 | diff --git a/src/requirementslib/models/dependency.py b/src/requirementslib/models/dependency.py
index <HASH>..<HASH> 100644
--- a/src/requirementslib/models/dependency.py
+++ b/src/requirementslib/models/dependency.py
@@ -314,12 +314,21 @@ class DependencyResolver(object):
elif not isinstance(dep, AbstractDependency):
dep = AbstractDependency.from_requirement(dep)
self.add_abstract_dep(dep)
- for _ in range(max_rounds):
+ for round_ in range(max_rounds):
self.pin_deps()
+ self.pin_history[round_] = self.pinned_deps.copy()
+ previous_round = self.pin_history[round_ - 1]
+ difference = set(self.pin_history[round_]) - set(previous_round)
+ if difference:
+ log("Difference: ")
+ for d in difference:
+ log(format_requirement(d))
+ if not difference and round >= 3:
+ return
if len(self.pinned_deps.keys()) == len(self.dep_dict.keys()):
return
# TODO: Raise a better error.
- raise RuntimeError('cannot resolve after {} rounds'.format(max_rounds))
+ raise RuntimeError("cannot resolve after {} rounds".format(max_rounds))
def get_resolver(sources=None): | Conclude resolution if dependencies are stable | sarugaku_requirementslib | train | py |
8fa39b442ffb31299fe2e477f4376f06987a9d4a | diff --git a/src/Moltin/SDK/Flows.php b/src/Moltin/SDK/Flows.php
index <HASH>..<HASH> 100644
--- a/src/Moltin/SDK/Flows.php
+++ b/src/Moltin/SDK/Flows.php
@@ -155,8 +155,10 @@ class Flows {
protected function _buildArgs($args, $val = true)
{
$string = '';
- foreach ( $args as $key => $value ) {
+ foreach ( $args as $key => $value ) {
if ($key == "value" && $val != true) {
+
+ } else {
if ( $value !== false ) { $string .= $key.'="'.( is_array($value) ? implode(' ', $value) : $value ).'" '; } elseif ($key != "required") { $string .= $key.' '; }
}
} | Don't add value="" for select boxes This could do with tidying up | moltin_php-sdk | train | php |
2481f016f4da79b5d1f3de15376a6c11f823e5fd | diff --git a/lib/rails_semantic_logger/extensions/rails/server.rb b/lib/rails_semantic_logger/extensions/rails/server.rb
index <HASH>..<HASH> 100644
--- a/lib/rails_semantic_logger/extensions/rails/server.rb
+++ b/lib/rails_semantic_logger/extensions/rails/server.rb
@@ -9,7 +9,7 @@ module Rails
def log_to_stdout
wrapped_app # touch the app so the logger is set up
- SemanticLogger.add_appender(io: $stdout, formatter: :color)
+ SemanticLogger.add_appender(io: $stdout, formatter: :color) unless SemanticLogger.appenders.console_output?
end
end
end | Avoid warning when attempting to add a second console appender
Starting a rails server that already features a console appender results in the following warning:
``
<I>-<I>-<I> <I>:<I>:<I> W [<I>:<I>] SemanticLogger::Appenders -- Ignoring attempt to add a second console appender: SemanticLogger::Appender::IO since it would result in duplicate console output.
``
This change fixes it. | rocketjob_rails_semantic_logger | train | rb |
4f42f9a5215811d8e093a146ef02559a9ed3f48a | diff --git a/test/sass/scss/scss_test.rb b/test/sass/scss/scss_test.rb
index <HASH>..<HASH> 100755
--- a/test/sass/scss/scss_test.rb
+++ b/test/sass/scss/scss_test.rb
@@ -1270,4 +1270,29 @@ CSS
foo {color: darken(black, 10%)}
SCSS
end
+
+ # ref: https://github.com/nex3/sass/issues/104
+ def test_no_buffer_overflow
+ template = render <<SCSS
+.aaa {
+ background-color: white;
+}
+.aaa .aaa .aaa {
+ background-color: black;
+}
+.bbb {
+ @extend .aaa;
+}
+.xxx {
+ @extend .bbb;
+}
+.yyy {
+ @extend .bbb;
+}
+.zzz {
+ @extend .bbb;
+}
+SCSS
+ Sass::SCSS::Parser.new(template, "test.scss").parse
+ end
end | Added test for patch to fix <URL> | sass_ruby-sass | train | rb |
45cfb23a65e135f3fbc2cecd757eaca8758ac002 | diff --git a/lib/BlockCypher/Core/BlockCypherLoggingManager.php b/lib/BlockCypher/Core/BlockCypherLoggingManager.php
index <HASH>..<HASH> 100644
--- a/lib/BlockCypher/Core/BlockCypherLoggingManager.php
+++ b/lib/BlockCypher/Core/BlockCypherLoggingManager.php
@@ -108,8 +108,16 @@ class BlockCypherLoggingManager
{
if ($this->isLoggingEnabled) {
$config = BlockCypherConfigManager::getInstance()->getConfigHashmap();
+
+ if (isset($config['mode'])) {
+ $configMode = $config['mode'];
+ } else {
+ // mode has not been configured by user
+ $configMode = null;
+ }
+
// Check if logging in live
- if ($config['mode'] == 'live') {
+ if ($configMode == 'live') {
// Live should not have logging level above INFO.
if ($this->loggingLevel >= BlockCypherLoggingLevel::INFO) {
// If it is at Debug Level, throw an warning in the log. | Fix bug thrown notice when mode has not been defined in config | blockcypher_php-client | train | php |
3d470ec3f2c471a16649473a552880b0601450c9 | diff --git a/tests/com/google/bitcoin/core/BlockTest.java b/tests/com/google/bitcoin/core/BlockTest.java
index <HASH>..<HASH> 100644
--- a/tests/com/google/bitcoin/core/BlockTest.java
+++ b/tests/com/google/bitcoin/core/BlockTest.java
@@ -58,7 +58,7 @@ public class BlockTest {
@Test
public void testDate() throws Exception {
Block block = new Block(params, blockBytes);
- assertEquals("Thu Nov 04 17:06:04 CET 2010", block.getTime().toString());
+ assertEquals("4 Nov 2010 16:06:04 GMT", block.getTime().toGMTString());
}
@Test | Make BlockTest.testDate pass outside of CET. | bitcoinj_bitcoinj | train | java |
365b8ae1e1ff9720b374babb1788023948e05778 | diff --git a/GPy/inference/latent_function_inference/laplace.py b/GPy/inference/latent_function_inference/laplace.py
index <HASH>..<HASH> 100644
--- a/GPy/inference/latent_function_inference/laplace.py
+++ b/GPy/inference/latent_function_inference/laplace.py
@@ -56,11 +56,8 @@ class Laplace(object):
#Compute hessian and other variables at mode
log_marginal, woodbury_vector, woodbury_inv, dL_dK, dL_dthetaL = self.mode_computations(f_hat, Ki_fhat, K, Y, likelihood, kern, Y_metadata)
- kern.update_gradients_full(dL_dK, X)
- likelihood.update_gradients(dL_dthetaL)
-
self._previous_Ki_fhat = Ki_fhat.copy()
- return Posterior(woodbury_vector=woodbury_vector, woodbury_inv=woodbury_inv, K=K), log_marginal, {'dL_dK':dL_dK}
+ return Posterior(woodbury_vector=woodbury_vector, woodbury_inv=woodbury_inv, K=K), log_marginal, {'dL_dK':dL_dK, 'dL_dthetaL':dL_dthetaL}
def rasm_mode(self, K, Y, likelihood, Ki_f_init, Y_metadata=None):
""" | more chancges to laplace | SheffieldML_GPy | train | py |
1d8837009d13a72bd05792c2325e352284e2bcb1 | diff --git a/src/shuffle.js b/src/shuffle.js
index <HASH>..<HASH> 100644
--- a/src/shuffle.js
+++ b/src/shuffle.js
@@ -603,7 +603,15 @@ Shuffle.prototype._doesPassFilter = function( category, $item ) {
var keys = this.delimeter && !$.isArray( groups ) ?
groups.split( this.delimeter ) :
groups;
- return $.inArray(category, keys) > -1;
+ var categories = [];
+ categories = categories.concat(category);
+ for (var i = 0; i < categories.length; i++) {
+ var categoryIsInKeys = $.inArray(categories[i], keys) > -1;
+ if(!categoryIsInKeys) {
+ return false;
+ }
+ }
+ return true;
}
}; | filter items that have every category in keys | Vestride_Shuffle | train | js |
09bafdd8e8285e810638e55858d888d32e4e3164 | diff --git a/Swat/SwatFileEntry.php b/Swat/SwatFileEntry.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatFileEntry.php
+++ b/Swat/SwatFileEntry.php
@@ -370,14 +370,15 @@ class SwatFileEntry extends SwatInputControl
// {{{ public static function getMaximumFileUploadSize()
/**
- * Returns the size (in bytes) of the upload size limit of the php
- * configuration.
+ * Returns the size (in bytes) of the upload size limit of the PHP
+ * configuration
*
* The maximum upload size is calculated based on the php ini values for
- * upload_max_filesize and post_max_size. Be aware that web server and POST
- * data settings can also effect upload size limits.
+ * <code>upload_max_filesize</code> and <code>post_max_size</code>. Be
+ * aware that web server and POST data settings can also affect the
+ * maximum upload size limit.
*
- * @return integer The maximum upload size in bytes.
+ * @return integer the maximum upload size in bytes.
*/
public static function getMaximumFileUploadSize()
{ | Clean up the already good documentation.
svn commit r<I> | silverorange_swat | train | php |
75ce3ba4341845a4d8f3129311532db808a7e11e | diff --git a/lib/nissh/mock_session.rb b/lib/nissh/mock_session.rb
index <HASH>..<HASH> 100644
--- a/lib/nissh/mock_session.rb
+++ b/lib/nissh/mock_session.rb
@@ -7,8 +7,11 @@ module Nissh
class MockSession
+ attr_reader :executed_commands
+
def initialize
@mocked_commands = {}
+ @executed_commands = []
end
def command(matcher, &block)
@@ -56,6 +59,9 @@ module Nissh
end
end
+ def close
+ end
+
private
def match_command(commands)
@@ -64,6 +70,7 @@ module Nissh
for matcher, mocked_command in @mocked_commands
if (matcher.is_a?(Regexp) ? matcher =~ command : matcher == command)
+ @executed_commands << command
return mocked_command
end
end
diff --git a/lib/nissh/session.rb b/lib/nissh/session.rb
index <HASH>..<HASH> 100644
--- a/lib/nissh/session.rb
+++ b/lib/nissh/session.rb
@@ -21,6 +21,10 @@ module Nissh
end
end
+ def close
+ @session.close rescue nil
+ end
+
def execute!(commands, options = {})
unless commands.is_a?(Array)
commands = [commands] | store all executed commands and add close method | adamcooke_nissh | train | rb,rb |
00279d8a06bd1f52e9b8e70896585bd16d88e430 | diff --git a/lib/less/node/dimension.js b/lib/less/node/dimension.js
index <HASH>..<HASH> 100644
--- a/lib/less/node/dimension.js
+++ b/lib/less/node/dimension.js
@@ -17,7 +17,7 @@ tree.Dimension.prototype = {
operate: function (op, other) {
return new(tree.Dimension)
(tree.operate(op, this.value, other.value),
- this.unit);
+ this.unit || other.unit);
}
}; | in an operation, inherit the unit of either operand | less_less.js | train | js |
088830c0bb83404be4826c19db3c1db0fc915d7e | diff --git a/lib/plugins/datacollector/dataCollector.js b/lib/plugins/datacollector/dataCollector.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/datacollector/dataCollector.js
+++ b/lib/plugins/datacollector/dataCollector.js
@@ -8,13 +8,9 @@ const fs = require('fs'),
Promise.promisifyAll(fs);
-const TIME_FORMAT = 'YYYY-MM-DD HH:mm:ss';
-
class DataCollector {
- constructor(context, options) {
+ constructor(context) {
this.storageManager = context.storageManager;
- this.timestamp = context.timestamp.format(TIME_FORMAT);
- this.options = options;
this.urlRunPages = context.dataCollection.urlRunPages;
this.urlPages = context.dataCollection.urlPages;
this.summaryPages = context.dataCollection.summaryPages;
diff --git a/lib/plugins/datacollector/index.js b/lib/plugins/datacollector/index.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/datacollector/index.js
+++ b/lib/plugins/datacollector/index.js
@@ -10,9 +10,8 @@ module.exports = {
return path.basename(__dirname);
},
- open(context, options) {
- this.dataCollector = new DataCollector(context, options);
- this.options = options;
+ open(context) {
+ this.dataCollector = new DataCollector(context);
},
processMessage(message) { | Remove unused code from DataCollector. | sitespeedio_sitespeed.io | train | js,js |
4cc87cb68db95e6a84c838dd066bfab8ddcb5d9c | diff --git a/services/datalad/datalad_service/common/s3.py b/services/datalad/datalad_service/common/s3.py
index <HASH>..<HASH> 100644
--- a/services/datalad/datalad_service/common/s3.py
+++ b/services/datalad/datalad_service/common/s3.py
@@ -92,7 +92,7 @@ def validate_s3_config(dataset_path, realm):
# get annex options for s3 bucket
try:
remote_log = subprocess.run(['git', 'cat-file', '-p', 'git-annex:remote.log'],
- cwd=dataset_path, capture_output=True, check=True)
+ cwd=dataset_path, capture_output=True, check=True, encoding='utf-8')
except subprocess.CalledProcessError as err:
if err.returncode == 128:
# git-annex:remote.log is most likely not created yet, skip validation | fix: Correctly decode reading remote.log | OpenNeuroOrg_openneuro | train | py |
438d79e4ef714f637f8f1cb50b01293e5232340a | diff --git a/lib/itamae/resource/file.rb b/lib/itamae/resource/file.rb
index <HASH>..<HASH> 100644
--- a/lib/itamae/resource/file.rb
+++ b/lib/itamae/resource/file.rb
@@ -60,15 +60,6 @@ module Itamae
run_command(["touch", attributes.path])
end
- change_target = @temppath || attributes.path
-
- if attributes.mode
- run_specinfra(:change_file_mode, change_target, attributes.mode)
- end
- if attributes.owner || attributes.group
- run_specinfra(:change_file_owner, change_target, attributes.owner, attributes.group)
- end
-
if @temppath
if run_specinfra(:check_file_is_file, attributes.path)
unless check_command(["diff", "-q", @temppath, attributes.path])
@@ -79,10 +70,20 @@ module Itamae
# new file
updated!
end
+ end
- if updated?
- run_specinfra(:move_file, @temppath, attributes.path)
- end
+ change_target = @temppath && updated? ? @temppath : attributes.path
+
+ if attributes.mode
+ run_specinfra(:change_file_mode, change_target, attributes.mode)
+ end
+
+ if attributes.owner || attributes.group
+ run_specinfra(:change_file_owner, change_target, attributes.owner, attributes.group)
+ end
+
+ if @temppath && updated?
+ run_specinfra(:move_file, @temppath, attributes.path)
end
end | Set mode and owner correctly when file not changed
Previously always mode and owner had set to temppath even if file content is
not changed.
P.S. we need well-covered tests! | itamae-kitchen_itamae | train | rb |
47679e7392d471f4bb2e725f5ab27b82311b25f5 | diff --git a/tldap/base.py b/tldap/base.py
index <HASH>..<HASH> 100644
--- a/tldap/base.py
+++ b/tldap/base.py
@@ -24,7 +24,6 @@ import tldap.modlist
import ldap.dn
-import copy
import sys
default_object_class_field = tldap.fields.CharField(required=True, max_instances=None)
@@ -546,7 +545,7 @@ class LDAPobject(object):
new_key,new_value,_ = split_new_rdn[0][0]
# make a copy before modifications
- self._db_values[using] = copy.copy(self._db_values[using])
+ self._db_values[using] = self._db_values[using].clone()
# delete old rdn attribute in object
old_key = self._meta.get_field_name(old_key) | Use clone() instead of copy module.
Copy module doesn't call constructor of CaseInsensitiveDict, resulting
in random errors because lc is not definied. | Karaage-Cluster_python-tldap | train | py |
ab5fa7b1e2f857afbff7f6a66644c9540104f1a2 | diff --git a/src/ODataResource.js b/src/ODataResource.js
index <HASH>..<HASH> 100644
--- a/src/ODataResource.js
+++ b/src/ODataResource.js
@@ -12,7 +12,7 @@ function hook(resource, pos, fn) {
/*eslint-disable */
method.map((curr) => {
if (resource._hooks[curr][pos]) {
- const _fn = resource._hooks[method][pos];
+ const _fn = resource._hooks[curr][pos];
resource._hooks[curr][pos] = (...args) => {
_fn.apply(resource, args);
fn.apply(resource, args); | fix(resource): cant set `all hook` over 1 more function for a resource. | TossShinHwa_node-odata | train | js |
ad69ab4e173a67adef0b41ff2cf5e9a340c07221 | diff --git a/lib/tty/prompt/reader/win_console.rb b/lib/tty/prompt/reader/win_console.rb
index <HASH>..<HASH> 100644
--- a/lib/tty/prompt/reader/win_console.rb
+++ b/lib/tty/prompt/reader/win_console.rb
@@ -41,7 +41,11 @@ module TTY
#
# @api private
def get_char(options)
- options[:echo] ? @input.getc : WinAPI.getch.chr
+ if options[:raw]
+ WinAPI.getch.chr
+ else
+ options[:echo] ? @input.getc : WinAPI.getch.chr
+ end
end
end # Console
end # Reader | Change to fix console modes on windows. | piotrmurach_tty-prompt | train | rb |
21cc0dca2c3e9daeaffa740c4c86bb6c3212a52b | diff --git a/src/Command/Checker.php b/src/Command/Checker.php
index <HASH>..<HASH> 100644
--- a/src/Command/Checker.php
+++ b/src/Command/Checker.php
@@ -104,13 +104,7 @@ final class Checker extends Command
private function getDocheaderFileContent(InputInterface $input)
{
$docheaderFile = $input->getOption('docheader');
-
- if ('.docheader' === $docheaderFile) {
- $docheaderFile = getcwd() . '/' . $docheaderFile;
- }
-
- $docheader = (new DocheaderFileResolution())->__invoke($docheaderFile);
-
+ $docheader = (new DocheaderFileResolution())->resolve($docheaderFile);
$filter = new Filter(file_get_contents($docheader));
return $filter->apply();
diff --git a/src/Helper/DocheaderFileResolution.php b/src/Helper/DocheaderFileResolution.php
index <HASH>..<HASH> 100644
--- a/src/Helper/DocheaderFileResolution.php
+++ b/src/Helper/DocheaderFileResolution.php
@@ -32,7 +32,7 @@ final class DocheaderFileResolution
*
* @return string
*/
- public function __invoke($pathOrFile)
+ public function resolve($pathOrFile)
{
if (is_dir($pathOrFile)) {
$pathOrFile .= '/.docheader'; | Remove checker for `.docheader` file that was not needed and rename
`DocheaderFileResolution::__invoke()` to
`DocheaderFileResolution::resolve()` | malukenho_docheader | train | php,php |
843d6955aa2aac3b77b9094a1148bd5955e85f9f | diff --git a/subitem/client_test.go b/subitem/client_test.go
index <HASH>..<HASH> 100644
--- a/subitem/client_test.go
+++ b/subitem/client_test.go
@@ -21,7 +21,9 @@ func TestSubscriptionItemGet(t *testing.T) {
}
func TestSubscriptionItemList(t *testing.T) {
- i := List(&stripe.SubscriptionItemListParams{})
+ i := List(&stripe.SubscriptionItemListParams{
+ Subscription: stripe.String("sub_123"),
+ })
// Verify that we can get at least one item
assert.True(t, i.Next()) | Fix subscription item listing test
When listing subscription items a subscription ID is required. Our test
wasn't previously doing that, so here we add one in.
Caught while testing against a newer version of stripe-mock that
validates query parameters. | stripe_stripe-go | train | go |
Subsets and Splits
Java Commits in Train Set
Queries for all entries where the diff_languages column is 'java', providing a filtered dataset but without deeper analysis.
Java Commits Test Data
Returns a subset of 5000 entries from the dataset where the programming language difference is Java, providing basic filtering for exploration.
Java Commits Sample
Retrieves the first 1,000 records where the 'diff_languages' column is 'java', providing limited insight into the specific data entries.
Java Commits Validation Sample
Retrieves a sample of entries from the validation dataset where the diff language is Java, providing limited insight into specific Java-related data points.
Java Commits in Validation
This query retrieves a limited sample of entries from the validation dataset where the programming language difference is Java, providing basic filtering with minimal insight.
Java Commits Sample
This query retrieves a sample of 100 records where the 'diff_languages' is 'java', providing basic filtering but limited analytical value.
Java Commits Sample
Retrieves 100 samples where the language difference is Java, providing basic filtering but minimal analytical value.
Java Commits Sample
Retrieves 10 samples where the diff_languages column is 'java', providing basic examples of data entries with this specific language.
Java Commits Validation Sample
Retrieves 1,000 records where the diff_languages column is marked as Java, providing a snapshot of that specific subset, but limited to raw data.
Java Commits Sample
This query retrieves 1000 random samples from the dataset where the programming language is Java, offering limited insight beyond raw data.