message: string, lengths 13 to 484
diff: string, lengths 38 to 4.63k
[sync] make Monitor methods thread safe. This becomes important when multiple frontends try to start/stop the daemon at the same time.
@@ -2892,6 +2892,8 @@ class SyncMonitor: self.paused_by_user = Event() self.paused_by_user.set() + self._lock = RLock() + self.startup = Event() self.fs_event_handler = FSEventHandler(self.syncing, self.startup, self.sync) @@ -2926,6 +2928,8 @@ class SyncMonitor: def start(self): """Creates observer threads and starts syncing.""" + with self._lock: + if self.running.is_set() or self.startup.is_set(): # do nothing if already started return @@ -3009,6 +3013,7 @@ class SyncMonitor: def pause(self): """Pauses syncing.""" + with self._lock: self.paused_by_user.set() self.syncing.clear() @@ -3021,6 +3026,7 @@ class SyncMonitor: def resume(self): """Checks for changes while idle and starts syncing.""" + with self._lock: if not self.paused_by_user.is_set(): return @@ -3030,6 +3036,8 @@ class SyncMonitor: def stop(self): """Stops syncing and destroys worker threads.""" + with self._lock: + if not self.running.is_set(): return
Remove duplicated "new feature" box on PPU page Closes
</p> </div> <div class="col-md-5"> - <p class="alert alert-info" style="margin-top: 0">This is a new, experimental feature. We'd love to <a href="mailto:{{ SUPPORT_TO_EMAIL }}" class="feedback-show" style="text-decoration: underline">hear your feedback</a>.</p> <p class="alert alert-info" style="margin-top: 0"> This is a new, experimental feature. We'd love to - <a href="mailto:{{ SUPPORT_TO_EMAIL }}" class="feedback-show" style="text-decoration: underline">hear your feedback</a>. + <a href="mailto:{{ SUPPORT_TO_EMAIL }}" class="feedback-show" style="text-decoration: underline"> + hear your feedback + </a>. </p> </div>
gift-pokemon: Gen III. Both the new games and the reboots.
@@ -20,6 +20,11 @@ def gift_data(): G = get_version(u'gold') S = get_version(u'silver') C = get_version(u'crystal') + RU = get_version(u'ruby') + SA = get_version(u'sapphire') + EM = get_version(u'emerald') + FR = get_version(u'firered') + LG = get_version(u'leafgreen') return [ # Gen I [ u'bulbasaur', [ R, B ], 5, u'pallet-town' ], @@ -56,6 +61,30 @@ def gift_data(): [ u'shuckle', [ G, S, C ], 15, u'cianwood-city' ], [ u'dratini', [ C ], 15, u'dragons-den' ], [ u'tyrogue', [ G, S, C ], 10, u'mt-mortar', u'b1f' ], + + # Gen III + [ u'treecko', [ RU, SA, EM ], 5, u'hoenn-route-101' ], + [ u'torchic', [ RU, SA, EM ], 5, u'hoenn-route-101' ], + [ u'mudkip' , [ RU, SA, EM ], 5, u'hoenn-route-101' ], + [ u'wynaut', [ RU, SA, EM ], 0, u'lavaridge-town' ], + [ u'castform', [ RU, SA, EM ], 25, u'hoenn-route-119', u'weather-center' ], + [ u'beldum', [ RU, SA, EM ], 5, u'mossdeep-city', u'stevens-house' ], + [ u'chikorita', [ EM ], 5, u'littleroot-town' ], + [ u'cyndaquil', [ EM ], 5, u'littleroot-town' ], + [ u'totodile', [ EM ], 5, u'littleroot-town' ], + + [ u'bulbasaur', [ FR, LG ], 5, u'pallet-town' ], + [ u'charmander', [ FR, LG ], 5, u'pallet-town' ], + [ u'squirtle', [ FR, LG ], 5, u'pallet-town' ], + [ u'aerodactyl', [ FR, LG ], 5, u'pewter-city', u'museum-of-science' ], + [ u'magikarp', [ FR, LG ], 5, u'kanto-route-4', u'pokemon-center' ], + [ u'omanyte', [ FR, LG ], 5, u'mt-moon', u'b2f' ], + [ u'kabuto', [ FR, LG ], 5, u'mt-moon', u'b2f' ], + [ u'hitmonlee', [ FR, LG ], 25, u'saffron-city', u'fighting-dojo' ], + [ u'hitmonchan', [ FR, LG ], 25, u'saffron-city', u'fighting-dojo' ], + [ u'eevee', [ FR, LG ], 25, u'celadon-city', u'celadon-mansion' ], + [ u'lapras', [ FR, LG ], 25, u'saffron-city', u'silph-co-7f' ], + [ u'togepi', [ FR, LG ], 0, u'water-labyrinth' ], ]
Fixed quota update fails
@@ -63,6 +63,7 @@ export class ManageEnvironmentComponent implements OnInit { if (this.getCurrentTotalValue()) { if (this.getCurrentTotalValue() >= this.getCurrentUsersTotal()) { this.manageUsersForm.controls['total'].setErrors(null); + if (this.manageUsersForm.controls['total'].value > 1000000000) this.manageUsersForm.controls['total'].setErrors({max: true}); this.manageUsersForm.controls['projects']['controls'].forEach(v => { v.controls['budget'].errors && 'max' in v.controls['budget'].errors ? null : v.controls['budget'].setErrors(null); @@ -123,7 +124,7 @@ export class ManageEnvironmentComponent implements OnInit { private userValidityCheck(control) { if (control && control.value) { - this.manageUsersForm.value.projects.find(v => v.project === control.parent.value.project).budget = control.value; + if (control.parent)this.manageUsersForm.value.projects.find(v => v.project === control.parent.value.project).budget = control.value; return (this.getCurrentTotalValue() && this.getCurrentTotalValue() < this.getCurrentUsersTotal()) ? { overrun: true } : null; } }
Put experimental examples at the end and order other examples alphabetically. Test Plan: manual inspection. Reviewers: sashank, yuhan
"title": "Asset Materialization", "description": "Record that a solid materialized an asset" }, - { - "name": "basic_pyspark", - "title": "PySpark", - "description": "Run PySpark code in solids" - }, { "name": "conditional_execution", "title": "Conditional Execution", "title": "Kubernetes Deployment", "description": "Deploy Dagster on Kubernetes" }, - { - "name": "simple_lakehouse", - "title": "Lakehouse (Experimental)", - "description": "Define a computation graph in terms of the tables it produces." - }, - { - "name": "multi_type_lakehouse", - "title": "Lakehouse with Pandas and Pyspark (Experimental)", - "description": "Use the Lakehouse API with multiple compute types." - }, { "name": "multi_location", "title": "Multi-Location Workspace", "description": "Unit-test a pipeline" }, { - "name": "memoized_development", - "title": "Memoization (Experimental)", - "description": "Memoizing results from previous pipeline runs." + "name": "basic_pyspark", + "title": "PySpark", + "description": "Run PySpark code in solids" }, { "name": "dynamic_graph", "title": "Dynamic Graph (Experimental)", "description": "Determine graph structure at runtime" + }, + { + "name": "simple_lakehouse", + "title": "Lakehouse (Experimental)", + "description": "Define a computation graph in terms of the tables it produces." + }, + { + "name": "multi_type_lakehouse", + "title": "Lakehouse w/ Two Types (Experimental)", + "description": "Use the Lakehouse API with multiple compute types." + }, + { + "name": "memoized_development", + "title": "Memoization (Experimental)", + "description": "Memoizing results from previous pipeline runs." } ]
Included: License file in the Linux package. HG-- branch : TexText_0.6
@@ -4,10 +4,10 @@ rem rem The script creates a directory "textext-[Version]-linux" with a rem subdirectoy "extension". The extion files go into the extension rem subdirectory while the readme and setup-script are placed into -rem the "textext-[Version]-linu"x directory +rem the "textext-[Version]-linux" directory rem Some variables -set TexTextVersion=0.6 +set TexTextVersion=0.6.1 set PackagePath=texttext-%TexTextVersion%-linux set ExtensionPath=%PackagePath%\extension set PackageName=TexText-Linux-%TexTextVersion% @@ -32,7 +32,8 @@ call :copy_func textext.py %ExtensionPath% call :copy_func typesetter.py %ExtensionPath% call :copy_func win_app_paths.py %ExtensionPath% call :copy_func setup.py %PackagePath% -call :copy_func docs\Readme.pdf %PackagePath% +call :copy_func docs\README-TexText.pdf %PackagePath% +call :copy_func LICENSE.txt %PackagePath% rem If we have tar available on this machine build a tgz package rem (Output is directed to nul, errors ("2") are directed to nul)
Add type hinting in finite mdp See
import importlib from functools import partial +from typing import TYPE_CHECKING + import numpy as np from highway_env import utils +if TYPE_CHECKING: + from highway_env.envs import AbstractEnv + -def finite_mdp(env, - time_quantization=1., - horizon=10.): +def finite_mdp(env: 'AbstractEnv', + time_quantization: float = 1., + horizon: float = 10.) -> object: """ Time-To-Collision (TTC) representation of the state. @@ -72,7 +77,8 @@ def finite_mdp(env, raise ModuleNotFoundError("The finite_mdp module is required for conversion. {}".format(e)) -def compute_ttc_grid(env, time_quantization, horizon, considered_lanes="all"): +def compute_ttc_grid(env: 'AbstractEnv', time_quantization: float, horizon: float, considered_lanes: str = "all") \ + -> np.ndarray: """ For each ego-velocity and lane, compute the predicted time-to-collision to each vehicle within the lane and store the results in an occupancy grid. @@ -109,7 +115,7 @@ def compute_ttc_grid(env, time_quantization, horizon, considered_lanes="all"): return grid -def transition_model(h, i, j, a, grid): +def transition_model(h: int, i: int, j: int, a: int, grid: np.ndarray) -> np.ndarray: """ Deterministic transition from a position in the grid to the next. @@ -132,7 +138,7 @@ def transition_model(h, i, j, a, grid): return next_state -def clip_position(h, i, j, grid): +def clip_position(h: int, i: int, j: int, grid: np.ndarray) -> np.ndarray: """ Clip a position in the TTC grid, so that it stays within bounds.
[modules/ping] Use framework background update functionality see
@@ -15,7 +15,6 @@ Parameters: import re import time -import threading import core.module import core.widget @@ -25,37 +24,6 @@ import core.decorators import util.cli -def get_rtt(module, widget): - try: - widget.set("rtt-unreachable", False) - res = util.cli.execute( - "ping -n -q -c {} -W {} {}".format( - widget.get("rtt-probes"), - widget.get("rtt-timeout"), - widget.get("address"), - ) - ) - - for line in res.split("\n"): - if line.startswith( - "{} packets transmitted".format(widget.get("rtt-probes")) - ): - m = re.search(r"(\d+)% packet loss", line) - - widget.set("packet-loss", m.group(1)) - - if not line.startswith("rtt"): - continue - m = re.search(r"([0-9\.]+)/([0-9\.]+)/([0-9\.]+)/([0-9\.]+)\s+(\S+)", line) - - widget.set("rtt-min", float(m.group(1))) - widget.set("rtt-avg", float(m.group(2))) - widget.set("rtt-max", float(m.group(3))) - widget.set("rtt-unit", m.group(5)) - except Exception as e: - widget.set("rtt-unreachable", True) - - core.event.trigger("update", [module.id], redraw_only=True) class Module(core.module.Module): @@ -63,6 +31,8 @@ class Module(core.module.Module): def __init__(self, config, theme): super().__init__(config, theme, core.widget.Widget(self.rtt)) + self.background = True + widget = self.widget() widget.set("address", self.parameter("address", "8.8.8.8")) @@ -88,8 +58,35 @@ class Module(core.module.Module): return self.threshold_state(widget.get("rtt-avg"), 1000.0, 2000.0) def update(self): - thread = threading.Thread(target=get_rtt, args=(self, self.widget(),)) - thread.start() + widget = self.widget() + try: + widget.set("rtt-unreachable", False) + res = util.cli.execute( + "ping -n -q -c {} -W {} {}".format( + widget.get("rtt-probes"), + widget.get("rtt-timeout"), + widget.get("address"), + ) + ) + + for line in res.split("\n"): + if line.startswith( + "{} packets transmitted".format(widget.get("rtt-probes")) + ): + m = re.search(r"(\d+)% packet loss", line) + + widget.set("packet-loss", m.group(1)) + + if not line.startswith("rtt"): + continue + m = re.search(r"([0-9\.]+)/([0-9\.]+)/([0-9\.]+)/([0-9\.]+)\s+(\S+)", line) + + widget.set("rtt-min", float(m.group(1))) + widget.set("rtt-avg", float(m.group(2))) + widget.set("rtt-max", float(m.group(3))) + widget.set("rtt-unit", m.group(5)) + except Exception as e: + widget.set("rtt-unreachable", True) # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
journal name validation. When exporting, unusual characters cause an error in the filename, so this is now limited with name validation.
@@ -31,6 +31,7 @@ from GUI.ui_dialog_journals import Ui_Dialog_journals from confirm_delete import DialogConfirmDelete import datetime import os +import re import sys import logging import traceback @@ -38,6 +39,7 @@ import traceback path = os.path.abspath(os.path.dirname(__file__)) logger = logging.getLogger(__name__) + def exception_handler(exception_type, value, tb_obj): """ Global exception handler useful in GUIs. tb_obj: exception.__traceback__ """ @@ -159,6 +161,12 @@ class DialogJournals(QtWidgets.QDialog): QtWidgets.QMessageBox.warning(None, 'Warning', "Journal name in use", QtWidgets.QMessageBox.Ok) return + # Check for unusual characters in filename that would affect exporting + valid = re.match('^[\ \w-]+$', name) is not None + if not valid: + QtWidgets.QMessageBox.warning(None, 'Warning - invalid characters', + "In the jornal name use only: a-z, A-z 0-9 - space", QtWidgets.QMessageBox.Ok) + return # update database journal = {'name':name, 'jentry': '', 'owner':self.settings['codername'], @@ -203,7 +211,7 @@ class DialogJournals(QtWidgets.QDialog): filename += ".txt" options = QtWidgets.QFileDialog.DontResolveSymlinks | QtWidgets.QFileDialog.ShowDirsOnly directory = QtWidgets.QFileDialog.getExistingDirectory(None, - "Select directory to save file", os.getenv('HOME'), options) + "Select directory to save file", os.path.expanduser('~'), options) if directory: filename = directory + "/" + filename data = self.journals[x]['jentry'] @@ -258,9 +266,19 @@ class DialogJournals(QtWidgets.QDialog): # check that no other journal has this name and it is not empty update = True if new_name == "": + QtWidgets.QMessageBox.warning(None, 'Warning', "No name was entered", + QtWidgets.QMessageBox.Ok) update = False for c in self.journals: if c['name'] == new_name: + QtWidgets.QMessageBox.warning(None, 'Warning', "Journal name in use", + QtWidgets.QMessageBox.Ok) + update = False + # Check for unusual characters in filename that would affect exporting + valid = re.match('^[\ \w-]+$', name) is not None + if not valid: + QtWidgets.QMessageBox.warning(None, 'Warning - invalid characters', + "In the jornal name use only: a-z, A-z 0-9 - space", QtWidgets.QMessageBox.Ok) update = False if update: # update source list and database
svtplay: Add support for tabs on genre pages. Adds support for tabs on genre pages; only works for the items on tabs that do not link to just a show page.
@@ -126,8 +126,22 @@ class Svtplay(Service, OpenGraphThumbMixin): def _genre(self, jansson): videos = [] - for i in jansson["clusterPage"]["clips"]: - videos.append(i["contentUrl"]) + parse = urlparse(self._url) + dataj= jansson["clusterPage"] + tab = re.search("tab=(.+)",parse.query) + if(tab): + tab = tab.group(1) + for i in dataj["tabs"]: + if i["slug"] == tab: + for n in i["content"]: + parse = urlparse(n["contentUrl"]) + if parse.path not in videos: + videos.append(parse.path) + else: + for i in dataj["clips"]: + parse = urlparse(i["contentUrl"]) + if parse.path not in videos: + videos.append(parse.path) return videos def find_all_episodes(self, options):
Add a Python 2 version of make_image_classifier as a workaround for somebody's Python 3 problems. (This is not pip installed.)
@@ -55,6 +55,17 @@ py_binary( ], ) +# The make_image_classifier script as a PY2 py_binary. +py_binary( + name = "make_image_classifier_py2", + srcs = ["make_image_classifier.py"], + main = "make_image_classifier.py", + python_version = "PY2", + deps = [ + ":make_image_classifier_main", + ], +) + py_test( name = "make_image_classifier_test", srcs = ["make_image_classifier_test.py"],
Fix undefined attribute bug in ELFBinaryFile. If the path to the ELF file is invalid, the _owns_file attribute would not be defined, but was accessed in __del__().
@@ -104,12 +104,12 @@ class ELFBinaryFile(object): """ def __init__(self, elf, memory_map=None): + self._owns_file = False if isinstance(elf, six.string_types): self._file = open(elf, 'rb') self._owns_file = True else: self._file = elf - self._owns_file = False self._elf = ELFFile(self._file) self._memory_map = memory_map or MemoryMap()
fix error on first annotation page open. At first the `checkpoint` localStorage entry does not exist, so the `checkpoint` variable ends up being `null`.
@@ -29,7 +29,7 @@ export const mutations = { localStorage.setItem('checkpoint', JSON.stringify(checkpoint)) }, loadPage(state) { - const checkpoint = JSON.parse(localStorage.getItem('checkpoint')) + const checkpoint = JSON.parse(localStorage.getItem('checkpoint')) || {} state.page = checkpoint[state.projectId] ? checkpoint[state.projectId] : 1 }, setProjectId(state, projectId) {
Update search_files.py GK fix for modified timestamp extraction
@@ -146,7 +146,7 @@ class FileSeekerZip(FileSeekerBase): if fnmatch.fnmatch(member, filepattern): try: extracted_path = self.zip_file.extract(member, path=self.temp_folder) # already replaces illegal chars with _ when exporting - f = extracted_path.infolist() + f = self.zip_file.getinfo(member) date_time = f.date_time date_time = time.mktime(date_time + (0, 0, -1)) os.utime(extracted_path, (date_time, date_time))
Add more discussion of the --capture-key and --keyfile-path options. Move them down to the OPTIONS section.
@@ -73,11 +73,9 @@ blockdev list [pool_name]:: List all blockdevs that make up the specified pool, or all pools, if no pool name is given. key set <(--keyfile-path <path> | --capture-key)> <key_desc>:: - Set a key in the kernel keyring for use with encryption either from a keyfile - or captured interactively from user input. + Set a key in the kernel keyring for use with encryption. key reset <(--keyfile-path <path> | --capture-key)> <key_desc>:: - Reset the key data of an existing key in the kernel keyring either from a keyfile - or captured interactively from user input. + Reset the key data of an existing key in the kernel keyring. key unset <key_desc>:: Unset a key in the kernel keyring so it is no longer available for encryption operations. @@ -99,6 +97,16 @@ OPTIONS The key description of the key that should be used to encrypt the created pool. The key description must correspond to a key set in the kernel keyring with the *key* command. +--keyfile-path <path> | --capture-key:: + These mutually exclusive options allow a user to specify a key used + for encryption in one of two ways. The *--keyfile-path* option requires + an argument, the path to a file containing the key. If the + *--capture-key* option is selected instead, the user must enter the key + at the ensuing prompt. The key value is terminated at the first newline + character that the user enters, and does not include the newline + character. On the other hand, if the file specified as an argument for + the *--keyfile-path* option contains a newline character anywhere, the + newline character will be included in the key value. ENVIRONMENT VARIABLES ---------------------
Update bonus.rtl.sh. Remove the duplicated line.
@@ -16,7 +16,6 @@ if [ ${#network} -eq 0 ]; then exit 1 fi -source /mnt/hdd/raspiblitz.conf # add default value to raspi config if needed if ! grep -Eq "^rtlWebinterface=" /mnt/hdd/raspiblitz.conf; then
Remove Rietveld CQ config. Rietveld CQ has already been disabled and is no longer supported. [email protected] No-Try: True
@@ -6,9 +6,6 @@ cq_name: "luci-py" git_repo_url: "https://chromium.googlesource.com/infra/luci/luci-py" cq_status_url: "https://chromium-cq-status.appspot.com" gerrit {} -rietveld { - url: "https://codereview.chromium.org" -} verifiers { gerrit_cq_ability { committer_list: "project-infra-committers"
[asciidoc] Repair user-provided ASCIIDOC_OPTIONS The ASCIIDOC_OPTIONS value from conf.py was being inserted at the end of the asciidoc command line, after the input file ('-'). This causes asciidoc to fail with: asciidoc: Too many arguments The solution is to insert ASCIIDOC_OPTIONS before the input file argument.
@@ -55,12 +55,13 @@ class CompileAsciiDoc(PageCompiler): binary = self.site.config.get('ASCIIDOC_BINARY', 'asciidoc') options = self.site.config.get('ASCIIDOC_OPTIONS', '') options = shlex.split(options) + command = [binary, '-b', 'html5', '-s'] + options + ['-'] if not is_two_file: m_data, data = self.split_metadata(data, post, lang) from nikola import shortcodes as sc new_data, shortcodes = sc.extract_shortcodes(data) - p = subprocess.Popen([binary, '-b', 'html5', '-s', '-'] + options, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + p = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE) output = p.communicate(input=new_data.encode('utf8'))[0].decode('utf8') output, shortcode_deps = self.site.apply_shortcodes_uuid(output, shortcodes, filename=source_path, extra_context={'post': post}) return output, p.returncode, [], shortcode_deps
Return message even if no OS has matched. This commit fixes and might also help keep track of messages that do not match any OS in general.
@@ -193,6 +193,7 @@ class NapalmLogsServerProc(NapalmLogsProc): log.debug('No match found for %s', dev_os) if not ret: log.debug('Not matched any OS') + ret.append((None, msg_dict)) return ret def start(self):
feat(README): update some badges. Currently commented.
<a href="https://dev.azure.com/timotheemathieu/timotheemathieu/_build?definitionId=2"> <img alt="azure" src="https://dev.azure.com/timotheemathieu/timotheemathieu/_apis/build/status/rlberry-py.rlberry?branchName=refs%2Fpull%2F119%2Fmerge"> </a> +</p> +<p align="center"> <!-- <a href="https://img.shields.io/pypi/pyversions/rlberry"> <img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/rlberry"> </a> --> -</p> - -<p align="center"> <!-- <a href="https://pypi.org/project/rlberry/"> <img alt="PyPI" src="https://img.shields.io/pypi/v/rlberry"> </a> -->
Prepare 1.30.0rc2 [ci skip-rust-tests]
@@ -5,6 +5,27 @@ This document describes releases leading up to the ``1.30.x`` ``stable`` series. See https://pants.readme.io/v1.30/docs/release-notes-1-30 for an overview of the changes in this release. +1.30.0rc2 (7/14/2020) +--------------------- + +Bugfixes +~~~~~~~~ + +* Fix Pytest XML reports and Coverage breaking with remote execution (cherrypick of #10136) (#10324) + `PR #10324 <https://github.com/pantsbuild/pants/pull/10324>`_ + +* Set dynamic-ui default based on CI (cherrypick of #10140) (#10325) + `PR #10140 <https://github.com/pantsbuild/pants/pull/10140>`_ + +* Hotfix Bandit breaking from Stevedore 3.0 release (#10322) + `PR #10322 <https://github.com/pantsbuild/pants/pull/10322>`_ + +Refactoring, Improvements, and Tooling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* Fix tests that relied on the docsite CNAME file. (cherrypick of #10337) (#10342) + `PR #10342 <https://github.com/pantsbuild/pants/pull/10342>`_ + 1.30.0rc1 (6/27/2020) ---------------------
sanitize ansi sequence close
@@ -55,6 +55,9 @@ GIT_REQUIRE_MINOR = 9 GIT_REQUIRE_PATCH = 0 +ANSI_ESCAPE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') + + class LoggingProcessWrapper(object): """ @@ -262,6 +265,9 @@ class GitCommand(StatusMixin, raise GitSavvyError( "`{}` failed.".format(command_str), show_panel=show_panel_on_stderr) + if stdout: + stdout = ANSI_ESCAPE.sub('', stdout) + return stdout def decode_stdout(self, stdout):
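As a side illustration, not part of the commit above: a minimal sketch of what the ANSI_ESCAPE pattern strips, assuming git emits typical SGR color sequences (the sample string below is hypothetical).

import re

# Same pattern as in the diff above: matches CSI escape sequences such as SGR color codes.
ANSI_ESCAPE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')

colored = "\x1b[31mmodified:\x1b[0m   core/git_command.py"  # hypothetical colored git output
print(ANSI_ESCAPE.sub('', colored))  # prints: modified:   core/git_command.py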
- updated logging. Printing what is actually missing is more helpful than only the full data.
@@ -130,8 +130,8 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin): if not note_text.solved: self.log.warning(( "Note template require more keys then can be provided." - "\nTemplate: {}\nData: {}" - ).format(template, format_data)) + "\nTemplate: {}\nMissing values for keys:{}\nData: {}" + ).format(template, note_text.missing_keys, format_data)) continue if not note_text:
Add src to sys.path if module is in src fixes
import sys +from ...masonry.utils.module import Module from .venv_command import VenvCommand @@ -25,19 +26,32 @@ class ScriptCommand(VenvCommand): module, callable_ = scripts[script].split(':') + src_in_sys_path = 'sys.path.append(\'src\'); '\ + if self._module.is_in_src() else '' + cmd = ['python', '-c'] cmd += [ '"import sys; ' 'from importlib import import_module; ' - 'sys.argv = {!r}; ' + 'sys.argv = {!r}; {}' 'import_module(\'{}\').{}()"'.format( - argv, module, callable_ + argv, src_in_sys_path, module, callable_ ) ] self.venv.run(*cmd, shell=True, call=True) + @property + def _module(self): + poetry = self.poetry + package = poetry.package + path = poetry.file.parent + module = Module( + package.name, path.as_posix() + ) + return module + def merge_application_definition(self, merge_args=True): if self._application is None \ or (self._application_definition_merged
llvm/codegen/scheduler: Use Any(AllHaveRun(PASS)) instead of EveryNCalls(1) to translate scheduler consideration queue. The former approach considered Run scope executions and caused premature executions if custom scheduling rules were used.
@@ -2396,7 +2396,7 @@ from psyneulink.core.globals.parameters import Parameter, ParametersBase from psyneulink.core.globals.registry import register_category from psyneulink.core.globals.utilities import \ ContentAddressableList, call_with_pruned_args, convert_to_list, convert_to_np_array -from psyneulink.core.scheduling.condition import All, Always, Condition, EveryNCalls, Never +from psyneulink.core.scheduling.condition import All, AllHaveRun, Always, Any, Condition, Never from psyneulink.core.scheduling.scheduler import Scheduler from psyneulink.core.scheduling.time import Time, TimeScale from psyneulink.core.globals.preferences.preferenceset import PreferenceLevel, PreferenceSet, _assign_prefs @@ -9405,16 +9405,16 @@ class Composition(Composition_Base, metaclass=ComponentsMeta): break dep_group = group - # NOTE: This is not ideal we don't need to depend on - # the entire previous group. Only our dependencies - cond = [EveryNCalls(dep, 1) for dep in dep_group] - if node not in self.scheduler.conditions: - cond.append(Always()) - else: - node_conds = self.scheduler.conditions[node] - cond.append(node_conds) + # This condition is used to check of the step has passed. + # Not all nodes in the previous step need to execute + # (they might have other conditions), but if any one does we're good + # FIXME: This will fail if none of the previously considered + # nodes executes in this pass, but that is unlikely. + conds = [Any(*(AllHaveRun(dep, time_scale=TimeScale.PASS) for dep in dep_group))] if len(dep_group) else [] + if node in self.scheduler.conditions: + conds.append(self.scheduler.conditions[node]) - return All(*cond) + return All(*conds) def _input_matches_variable(self, input_value, var): var_shape = convert_to_np_array(var).shape
Change actions shortcuts in go_back and go_forward in Firefox mac When editing text `cmd-left` and `cmd-right` just move the cursor to the start or end of the line. `cmd-[` and `cmd-]` don't have that issue.
@@ -28,10 +28,10 @@ class BrowserActions: actions.key("cmd-n") def go_back(): - actions.key("cmd-left") + actions.key("cmd-[") def go_forward(): - actions.key("cmd-right") + actions.key("cmd-]") def go_home(): actions.key("cmd-shift-h")
SConstruct : use `universal_newlines` instead of `.decode()` `universal_newlines` offers better handling of encoding, and a clearer path to using the `text` argument when Python 2 support is dropped.
@@ -425,11 +425,11 @@ if env["PLATFORM"] != "win32" : if "g++" in os.path.basename( env["CXX"] ) : # Get GCC version. - gccVersion = subprocess.check_output( [ env["CXX"], "-dumpversion" ], env=env["ENV"] ).decode().strip() + gccVersion = subprocess.check_output( [ env["CXX"], "-dumpversion" ], env=env["ENV"], universal_newlines=True ).strip() if "." not in gccVersion : # GCC 7 onwards requires `-dumpfullversion` to get minor/patch, but this # flag does not exist on earlier GCCs, where minor/patch was provided by `-dumpversion`. - gccVersion = subprocess.check_output( [ env["CXX"], "-dumpfullversion" ], env=env["ENV"] ).decode().strip() + gccVersion = subprocess.check_output( [ env["CXX"], "-dumpfullversion" ], env=env["ENV"], universal_newlines=True ).strip() gccVersion = [ int( v ) for v in gccVersion.split( "." ) ] # GCC 4.1.2 in conjunction with boost::flat_map produces crashes when @@ -692,7 +692,7 @@ def runCommand( command ) : command = commandEnv.subst( command ) sys.stderr.write( command + "\n" ) - return subprocess.check_output( command, shell=True, env=commandEnv["ENV"] ).decode() + return subprocess.check_output( command, shell=True, env=commandEnv["ENV"], universal_newlines=True ) ############################################################################################### # The basic environment for building libraries
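For context on the commit message above, a small standalone sketch (not from the SConstruct file) of the behaviour `universal_newlines` provides: `check_output` returns decoded `str` instead of `bytes`, and on Python 3.7+ the same effect is spelled `text=True`.

import subprocess
import sys

# With universal_newlines=True, check_output decodes stdout to str, so no manual
# .decode() is needed; Python 3.7+ also accepts text=True for the same behaviour.
out = subprocess.check_output([sys.executable, "-c", "print('hello')"],
                              universal_newlines=True)
assert isinstance(out, str) and out.strip() == "hello"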
fix tests for operations: pass site codes instead of locations
@@ -53,8 +53,12 @@ class TestOperation(TestCase): self.location_structure, ) - def check_operation(self, old_locations, new_locations, archived=True): - self.operation(self.domain, old_locations, new_locations).perform() + def check_operation(self, old_site_codes, new_site_codes, archived=True): + operation = self.operation(self.domain, old_site_codes, new_site_codes) + operation.valid() + operation.perform() + old_locations = operation.old_locations + new_locations = operation.new_locations old_location_ids = [loc.location_id for loc in old_locations] new_location_ids = [loc.location_id for loc in new_locations] operation_time = old_locations[0].metadata[DEPRECATED_AT] @@ -75,33 +79,33 @@ class TestMergeOperation(TestOperation): operation = MergeOperation def test_perform(self): - new_locations = [self.locations['Boston']] - old_locations = [self.locations['Cambridge'], self.locations['Somerville']] - self.check_operation(old_locations, new_locations) + new_site_codes = [self.locations['Boston'].site_code] + old_site_codes = [self.locations['Cambridge'].site_code, self.locations['Somerville'].site_code] + self.check_operation(old_site_codes, new_site_codes) class TestSplitOperation(TestOperation): operation = SplitOperation def test_perform(self): - old_locations = [self.locations['Boston']] - new_locations = [self.locations['Cambridge'], self.locations['Somerville']] - self.check_operation(old_locations, new_locations) + old_site_codes = [self.locations['Boston'].site_code] + new_site_codes = [self.locations['Cambridge'].site_code, self.locations['Somerville'].site_code] + self.check_operation(old_site_codes, new_site_codes) class TestExtractOperation(TestOperation): operation = ExtractOperation def test_perform(self): - old_locations = [self.locations['Boston']] - new_locations = [self.locations['Cambridge']] - self.check_operation(old_locations, new_locations, archived=False) + old_site_codes = [self.locations['Boston'].site_code] + new_site_codes = [self.locations['Cambridge'].site_code] + self.check_operation(old_site_codes, new_site_codes, archived=False) class TestMoveOperation(TestOperation): operation = MoveOperation def test_perform(self): - old_locations = [self.locations['Boston']] - new_locations = [self.locations['Cambridge']] - self.check_operation(old_locations, new_locations) + old_site_codes = [self.locations['Boston'].site_code] + new_site_codes = [self.locations['Cambridge'].site_code] + self.check_operation(old_site_codes, new_site_codes)
Added URL field back in. I am awaiting DOI for the paper. Have added journal URL as a placeholder.
@@ -18,4 +18,5 @@ Resources: DataAtWork: Publications: - Title: High-Order Accurate Direct Numerical Simulation of Flow over a MTU-T161 Low Pressure Turbine Blade + URL: https://www.journals.elsevier.com/computers-and-fluids AuthorName: A. S. Iyer, Y. Abe, B. C. Vermeire, P. Bechlars, R. D. Baier, A. Jameson, F. D. Witherden, and P. E. Vincent
'core' folder included in parameters related to the On-Premise cluster * 'core' folder included in parameters related to the On-Premise cluster: the update is required because this sample was migrated to the samples/core folder * Update README.md
@@ -56,20 +56,14 @@ Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compi 1. The name of a GCP project. 2. An output directory in a Google Cloud Storage bucket, of the form `gs://<BUCKET>/<PATH>`. - On-Premise - For On-Premise cluster, the pipeline will create a Persistent Volume Claim (PVC), and download - automatically the - [source data](https://github.com/kubeflow/pipelines/tree/master/samples/core/tfx_cab_classification/taxi-cab-classification) - to the PVC. + For On-Premise cluster, the pipeline will create a Persistent Volume Claim (PVC), and download automatically the [source date](https://github.com/kubeflow/pipelines/tree/master/samples/core/tfx_cab_classification/taxi-cab-classification) to the PVC. 1. The `output` is PVC mount point for the containers, can be set to `/mnt`. 2. The `project` can be set to `taxi-cab-classification-pipeline-onprem`. 3. If the PVC mounted to `/mnt`, the value of below parameters need to be set as following: - - `column-names`: ` -/mnt/pipelines/samples/tfx/taxi-cab-classification/column-names.json` - - `train`: `/mnt/pipelines/samples/tfx/taxi-cab-classification/train.csv` - - `evaluation`: `/mnt/pipelines/samples/tfx/taxi-cab-classification/eval.csv` - - `preprocess-module`: `/mnt/pipelines/samples/tfx/taxi-cab-classification/preprocessing.py` - - + - `column-names`: `/mnt/pipelines/samples/core/tfx_cab_classification/taxi-cab-classification/column-names.json` + - `train`: `/mnt/pipelines/samples/core/tfx_cab_classification/taxi-cab-classification/train.csv` + - `evaluation`: `/mnt/pipelines/samples/core/tfx_cab_classification/taxi-cab-classification/eval.csv` + - `preprocess-module`: `/mnt/pipelines/samples/core/tfx_cab_classification/taxi-cab-classification/preprocessing.py` ## Components source
duplicate format_rule code into deduplication list view (the irony). Planning to refactor the deduplication view in the near future, which will remove the duplicate code.
@@ -967,7 +967,18 @@ class DeduplicationRuleListView(DataInterfaceSection, CRUDPaginatedViewMixin): return rule, None def _format_rule(self, rule): - ret = super()._format_rule(rule) + ret = { + 'id': rule.pk, + 'name': rule.name, + 'case_type': rule.case_type, + 'active': rule.active, + 'last_run': (ServerTime(rule.last_run) + .user_time(self.project_timezone) + .done() + .strftime(SERVER_DATETIME_FORMAT)) if rule.last_run else '-', + 'edit_url': reverse(self.edit_url_name, args=[self.domain, rule.pk]), + 'action_error': "", # must be provided because knockout template looks for it + } rule_properties = ( set(CaseDeduplicationActionDefinition.from_rule(rule).case_properties) - set(CaseListExplorerColumns.DEFAULT_COLUMNS)
Removed exception for empty methods. I'll create a separate ticket for that.
@@ -374,9 +374,6 @@ class DeviceViewSet(CustomFieldModelViewSet): """ Execute a NAPALM method on a Device """ - if not request.GET.get('method'): - raise ServiceUnavailable('No NAPALM methods were specified.') - device = get_object_or_404(Device, pk=pk) if not device.primary_ip: raise ServiceUnavailable("This device does not have a primary IP address configured.")
Additional fix for Even though was closed with I discovered a small visual bug. The list of badges people could still get was incorrect (it was still using the old logic for selecting missing requirements); this commit fixes this issue.
@@ -324,6 +324,18 @@ class PersonManager(BaseUserManager): default=0, output_field=IntegerField())) + def passed_either(req_a, req_b): + return Sum(Case(When(trainingprogress__requirement__name=req_a, + trainingprogress__state='p', + trainingprogress__discarded=False, + then=1), + When(trainingprogress__requirement__name=req_b, + trainingprogress__state='p', + trainingprogress__discarded=False, + then=1), + default=0, + output_field=IntegerField())) + return self.annotate( passed_training=passed('Training'), passed_swc_homework=passed('SWC Homework'), @@ -331,6 +343,8 @@ class PersonManager(BaseUserManager): passed_discussion=passed('Discussion'), passed_swc_demo=passed('SWC Demo'), passed_dc_demo=passed('DC Demo'), + passed_homework=passed_either('SWC Homework', 'DC Homework'), + passed_demo=passed_either('SWC Demo', 'DC Demo'), ).annotate( # We're using Maths to calculate "binary" score for a person to # be instructor badge eligible. Legend: @@ -543,9 +557,9 @@ class Person(AbstractBaseUser, PermissionsMixin, DataPrivacyAgreementMixin): fields = [ ('passed_training', 'Training'), - ('passed_swc_homework', 'SWC Homework'), + ('passed_homework', 'SWC or DC Homework'), ('passed_discussion', 'Discussion'), - ('passed_swc_demo', 'SWC Demo'), + ('passed_demo', 'SWC or DC Demo'), ] try: return [name for field, name in fields if not getattr(self, field)] @@ -559,9 +573,9 @@ class Person(AbstractBaseUser, PermissionsMixin, DataPrivacyAgreementMixin): fields = [ ('passed_training', 'Training'), - ('passed_dc_homework', 'DC Homework'), + ('passed_homework', 'SWC or DC Homework'), ('passed_discussion', 'Discussion'), - ('passed_dc_demo', 'DC Demo'), + ('passed_demo', 'SWC or DC Demo'), ] try: return [name for field, name in fields if not getattr(self, field)]
compose: Replace hrefs with "tabindex=0" for all buttons. For all buttons in the compose box, `href="#"` is replaced by "tabindex=0" so that the buttons are still focusable. This change also fixes a bug that caused the Formatting button to redirect to All messages.
<div class="drag"></div> <div id="below-compose-content"> <input type="file" id="file_input" class="notvisible pull-left" multiple /> - <a class="message-control-button fa fa-smile-o" aria-label="{{_('Add emoji')}}" id="emoji_map" href="#" title="{{ _('Add emoji') }}"></a> - <a class="message-control-button fa fa-font" aria-label="{{ _('Formatting') }}" href="#" title="{{ _('Formatting') }}" data-overlay-trigger="message-formatting"></a> + <a class="message-control-button fa fa-smile-o" aria-label="{{_('Add emoji')}}" id="emoji_map" tabindex=0 title="{{ _('Add emoji') }}"></a> + <a class="message-control-button fa fa-font" aria-label="{{ _('Formatting') }}" tabindex=0 title="{{ _('Formatting') }}" data-overlay-trigger="message-formatting"></a> {% if max_file_upload_size_mib > 0 %} - <a class="message-control-button fa fa-paperclip notdisplayed" aria-label="{{ _('Attach files') }}" id="attach_files" href="#" title="{{ _('Attach files') }}"></a> + <a class="message-control-button fa fa-paperclip notdisplayed" aria-label="{{ _('Attach files') }}" id="attach_files" tabindex=0 title="{{ _('Attach files') }}"></a> {% endif %} - <a class="message-control-button fa fa-video-camera video_link" aria-label="{{ _('Add video call') }}" href="#" title="{{ _('Add video call') }}"></a> - <a id="undo_markdown_preview" class="message-control-button fa fa-edit" aria-label="{{ _('Write') }}" href="#" style="display:none;" title="{{ _('Write') }}"></a> - <a id="markdown_preview" class="message-control-button fa fa-eye" aria-label="{{ _('Preview') }}" href="#" title="{{ _('Preview') }}"></a> + <a class="message-control-button fa fa-video-camera video_link" aria-label="{{ _('Add video call') }}" tabindex=0 title="{{ _('Add video call') }}"></a> + <a id="undo_markdown_preview" class="message-control-button fa fa-edit" aria-label="{{ _('Write') }}" tabindex=0 style="display:none;" title="{{ _('Write') }}"></a> + <a id="markdown_preview" class="message-control-button fa fa-eye" aria-label="{{ _('Preview') }}" tabindex=0 title="{{ _('Preview') }}"></a> <a class="drafts-link" href="#drafts" title="{{ _('Drafts') }} (d)">{{ _('Drafts') }}</a> <span id="sending-indicator"></span> <div id="send_controls" class="new-style">
Update app-dev Kube docs to remove reference to settings-tp log file. After the rust re-write, this no longer logs to a file by default.
@@ -608,21 +608,6 @@ create and submit a batch of transactions containing the configuration change. [2018-09-05 20:07:41.903 DEBUG core] received message of type: TP_PROCESS_REQUEST - * You can also connect to the ``sawtooth-settings-tp`` container on any pod, - then examine ``/var/log/sawtooth/logs/settings-xxxxxxx-debug.log``. (Each - Settings log file has a unique string in the name.) The messages will - resemble this example: - - .. code-block:: none - - . - . - . - [20:07:58.039 [MainThread] core DEBUG] received message of type: TP_PROCESS_REQUEST - [20:07:58.190 [MainThread] handler INFO] Setting setting - sawtooth.validator.transaction_families changed from None to [{"family": - "intkey", "version": "1.0"}, {"family":"sawtooth_settings", "version":"1.0"}, {"family":"xo", "version":"1.0"}, ... - #. Run the following command to check the setting change. You can use any container, such as a shell or another validator container.
Remove dangling cmake check for long typemeta Summary: TSIA Pull Request resolved:
@@ -37,28 +37,6 @@ if(EXISTS "/etc/os-release") endif() endif() -# ---[ Check if the data type long and int32_t/int64_t overlap. -cmake_push_check_state(RESET) -set(CMAKE_REQUIRED_FLAGS "-std=c++11") -CHECK_CXX_SOURCE_COMPILES( - "#include <cstdint> - - template <typename T> void Foo(); - template<> void Foo<int32_t>() {} - template<> void Foo<int64_t>() {} - int main(int argc, char** argv) { - Foo<long>(); - return 0; - }" CAFFE2_LONG_IS_INT32_OR_64) - -if (CAFFE2_LONG_IS_INT32_OR_64) - message(STATUS "Does not need to define long separately.") -else() - message(STATUS "Need to define long as a separate typeid.") - set(CAFFE2_UNIQUE_LONG_TYPEMETA 1) -endif() -cmake_pop_check_state() - # ---[ Check if std::exception_ptr is supported. cmake_push_check_state(RESET) set(CMAKE_REQUIRED_FLAGS "-std=c++11")
Don't store refs to files in cache File cache has been unused since file_reference were introduced, there's no point saving them to cache if they're never queried. Fixes
@@ -357,10 +357,7 @@ class UploadMethods: entities=msg_entities, reply_markup=markup, silent=silent, schedule_date=schedule, clear_draft=clear_draft ) - msg = self._get_response_message(request, await self(request), entity) - await self._cache_media(msg, file, file_handle, image=image) - - return msg + return self._get_response_message(request, await self(request), entity) async def _send_album(self: 'TelegramClient', entity, files, caption='', progress_callback=None, reply_to=None, @@ -399,16 +396,12 @@ class UploadMethods: r = await self(functions.messages.UploadMediaRequest( entity, media=fm )) - self.session.cache_file( - fh.md5, fh.size, utils.get_input_photo(r.photo)) fm = utils.get_input_media(r.photo) elif isinstance(fm, types.InputMediaUploadedDocument): r = await self(functions.messages.UploadMediaRequest( entity, media=fm )) - self.session.cache_file( - fh.md5, fh.size, utils.get_input_document(r.document)) fm = utils.get_input_media( r.document, supports_streaming=supports_streaming) @@ -720,16 +713,4 @@ class UploadMethods: ) return file_handle, media, as_image - async def _cache_media(self: 'TelegramClient', msg, file, file_handle, image): - if file and msg and isinstance(file_handle, - custom.InputSizedFile): - # There was a response message and we didn't use cached - # version, so cache whatever we just sent to the database. - md5, size = file_handle.md5, file_handle.size - if image: - to_cache = utils.get_input_photo(msg.media.photo) - else: - to_cache = utils.get_input_document(msg.media.document) - self.session.cache_file(md5, size, to_cache) - # endregion
UI: Improved help output wording * Make it more clear that some options should not be used by the end users.
@@ -437,7 +437,7 @@ debug_group.add_option( dest="profile", default=False, help="""\ -Enable vmprof based profiling of time spent. Defaults to off.""", +Enable vmprof based profiling of time spent. Not working currently. Defaults to off.""", ) debug_group.add_option( @@ -465,10 +465,11 @@ debug_group.add_option( dest="recompile_c_only", default=False, help="""\ -Take existing files and compile them again. Allows compiling edited C files -with the C compiler for quick debugging changes to the generated source. -Defaults to off. Depends on compiling Python source to determine which files it -should look at.""", +This is not incremental compilation, but for Nuitka development only. Takes +existing files and simply compile them as C again. Allows compiling edited +C files for quick debugging changes to the generated source, e.g. to see if +code is passed by, values output, etc, Defaults to off. Depends on compiling +Python source to determine which files it should look at.""", ) debug_group.add_option( @@ -479,7 +480,7 @@ debug_group.add_option( help="""\ Generate only C source code, and do not compile it to binary or module. This is for debugging and code coverage analysis that doesn't waste CPU. Defaults to -off.""", +off. Do not think you can use this directly.""", ) debug_group.add_option(
[tests] Improve devstack/post playbook efficiency By adjusting the syntax to the newer format, the fetch-subunit role will be skipped entirely, rather than run with all its tasks skipped. This gives us results sooner and burns less electricity, making the world a better place. :)
- hosts: all - roles: - - fetch-tox-output - - role: fetch-subunit-output + tasks: + - include_role: + name: fetch-tox-output + - include_role: + name: fetch-subunit-output when: fetch_subunit|default(true)|bool - - process-stackviz + - include_role: + name: process-stackviz
help: Fix warning in deactivate-your-account help page. After adding the owner role, we allow the last admin to deactivate but not the last owner, and this commit fixes the warning, which previously said the last admin could not deactivate.
your account at any time. !!! warn "" - If you are the only administrator in the organization, you cannot + If you are the only owner in the organization, you cannot deactivate your account. You'll need to - [add another administrator](/help/change-a-users-role) first. + [add another owner](/help/change-a-users-role) first. ## Related articles
Updated plot_fock_distribution: removed the offset value 0.4 to center the Fock state distribution on the ticks.
@@ -685,7 +685,7 @@ def plot_fock_distribution(rho, offset=0, fig=None, ax=None, N = rho.shape[0] - ax.bar(np.arange(offset, offset + N) - .4, np.real(rho.diag()), + ax.bar(np.arange(offset, offset + N), np.real(rho.diag()), color="green", alpha=0.6, width=0.8) if unit_y_range: ax.set_ylim(0, 1)
Document that the (redundant) second FASTQ header is always removed Closes
@@ -40,6 +40,10 @@ The output file format is also recognized from the file name extension. If the extensions was not recognized or when Cutadapt writes to standard output, the same format as the input is used for the output. +When writing a FASTQ file, a second header (the text after the ``+`` on the +third line of a record) that possibly exists in the input is removed. +When writing a FASTA file, line breaks within the sequence are removed. + See also :ref:`file format conversion <file-format-conversion>`. .. _compressed-files:
[Chore] Add release description of smart contract rollup binaries Problem: Smart contract rollup binaries were added, but we have not added descriptions for them that are to be included in the automated release steps. Solution: Add the descriptions for the new binaries, so that they will be included in the new releases.
@@ -42,6 +42,16 @@ in [ description = "Client for interacting with transaction rollup node"; supports = "PtLimaPt"; } + { + name = "octez-smart-rollup-client-PtMumbai"; + description = "Smart contract rollup CLI client for PtMumbai"; + supports = "PtMumbai"; + } + { + name = "octez-smart-rollup-node-PtMumbai"; + description = "Tezos smart contract rollup node for PtMumbai"; + supports = "PtMumbai"; + } ] ++ builtins.concatMap (protocol: [ { name = "octez-baker-${protocol}";
llvm, functions/LinearCombination,LinearMatrix: Use default value as base for _result_length
@@ -2264,8 +2264,7 @@ class LinearCombination(CombinationFunction): # ------------------------------- @property def _result_length(self): - # Input variable should be at least 2d - return np.atleast_2d(self.instance_defaults.variable).shape[1] + return len(self.instance_defaults.value) def get_input_struct_type(self): #FIXME this is ugly as HELL! @@ -4683,7 +4682,7 @@ class LinearMatrix(TransferFunction): # --------------------------------------- @property def _result_length(self): - return self.matrix.shape[1] + return len(self.instance_defaults.value) def get_output_struct_type(self):
Add sap combiner and deps to manifest for core collection * This fixes an issue with collection of the sap_hdb_version spec in core collection
@@ -103,6 +103,16 @@ plugins: - name: insights.combiners.hostname enabled: true + # needed to collect the sap_hdb_version spec that uses the Sap combiner + - name: insights.parsers.lssap + enabled: true + + - name: insights.parsers.saphostctrl + enabled: true + + - name: insights.combiners.sap + enabled: true + # needed because some specs aren't given names before they're used in DefaultSpecs - name: insights.core.spec_factory enabled: true
BaseStructType: accept names.Name instances as field names. This will extend the choice of casing, at least for fields from built-in structures. TN:
@@ -1296,14 +1296,19 @@ class BaseStructType(CompiledType): """ Bind input fields to `self` and initialize their name. - :param list[(str, AbstractNodeData)] fields: List of (name, field) for - this struct's fields. Inheritted fields must not appear in this - list. + :param list[(str|names.Name, AbstractNodeData)] fields: List of (name, + field) for this struct's fields. Inheritted fields must not appear + in this list. """ + self._fields = OrderedDict() for f_n, f_v in fields: - f_v.name = names.Name.from_lower(f_n) + f_v.name = (f_n if isinstance(f_n, names.Name) else + names.Name.from_lower(f_n)) f_v.struct = self - self._fields = OrderedDict(fields) + + # Use the "hidden" name so that lookups work on undecorated field + # names. + self._fields[f_v._name.lower] = f_v @property def py_nullexpr(self): @@ -1432,9 +1437,9 @@ class StructType(BaseStructType): """ :param name: See CompiledType.__init__. - :param list[(str, AbstractNodeData)] fields: List of (name, field) for - this struct's fields. Inheritted fields must not appear in this - list. + :param list[(str|names.Name, AbstractNodeData)] fields: List of (name, + field) for this struct's fields. Inheritted fields must not appear + in this list. """ super(StructType, self).__init__( name, location, doc, @@ -1606,8 +1611,9 @@ class ASTNodeType(BaseStructType): :param ASTNodeType|None base: ASTNodeType subclass corresponding to the base class for this node. None when creating the root node. - :param list[(str, AbstractNodeData)] fields: List of (name, field) for - this node's fields. Inherited fields must not appear in this list. + :param list[(str|names.Name, AbstractNodeData)] fields: List of (name, + field) for this node's fields. Inherited fields must not appear in + this list. :param langkit.envs.EnvSpec|None env_spec: Environment specification for this node, if any.
updates README typo. The default timeout for the HTTP check is 10 seconds, but the documentation says the default timeout is 1 second. Updated to fix this typo.
@@ -24,7 +24,7 @@ instances: # check_certificate_expiration: true # default is true # days_warning: 28 # default 14 # days_critical: 14 # default 7 - # timeout: 3 # in seconds. Default is 1. + # timeout: 3 # in seconds. Default is 10. - name: Example website (staging) url: http://staging.example.com/ ```
Fix bug in extract all for tarfiles Was getting hung up on "./" path prefixes coming from tarfile interface. Fix is to normalize member listing and names for tar files.
@@ -436,24 +436,38 @@ def _untar(src, select, unpack_dir): return _gen_unpack( unpack_dir, src, - tf.getmembers, - lambda tfinfo: tfinfo.name, + _tar_members_fun(tf), + _tar_member_name, tf.extractall, select) -def _gen_unpack(unpack_dir, src, list_members, member_name, extract_all, +def _tar_members_fun(tf): + def f(): + return [m for m in tf.getmembers() if m.name != "."] + return f + +def _tar_member_name(tfinfo): + return _strip_leading_dotdir(tfinfo.name) + +def _strip_leading_dotdir(path): + if path[:2] == "./": + return path[2:] + else: + return path + +def _gen_unpack(unpack_dir, src, list_members, member_name, extract, select): members = list_members() - member_names = [member_name(m) for m in members] + names = [member_name(m) for m in members] to_extract = [ - m for m in members - if not os.path.exists(os.path.join(unpack_dir, member_name(m)))] - extract_all(unpack_dir, to_extract) - _write_unpacked(member_names, unpack_dir, src) + m for m, name in zip(members, names) + if not os.path.exists(os.path.join(unpack_dir, name))] + extract(unpack_dir, to_extract) + _write_unpacked(names, unpack_dir, src) if select: - return _selected_source_paths(unpack_dir, member_names, select) + return _selected_source_paths(unpack_dir, names, select) else: - return _all_source_paths(unpack_dir, member_names) + return _all_source_paths(unpack_dir, names) def _write_unpacked(unpacked, unpack_dir, src): with open(_unpacked_src(unpack_dir, src), "w") as f:
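A small self-contained sketch, not taken from the project, of the normalization the fix performs; it builds an in-memory tar whose member names carry the "./" prefix described in the commit message.

import io
import tarfile

# Create an archive whose member name has a leading "./", as some tar writers do.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    info = tarfile.TarInfo(name="./data/example.txt")
    payload = b"hello"
    info.size = len(payload)
    tf.addfile(info, io.BytesIO(payload))

# Read it back and apply the same normalization as _tar_member_name/_strip_leading_dotdir.
buf.seek(0)
with tarfile.open(fileobj=buf, mode="r") as tf:
    names = [m.name for m in tf.getmembers() if m.name != "."]
    normalized = [n[2:] if n.startswith("./") else n for n in names]
print(names)       # ['./data/example.txt']
print(normalized)  # ['data/example.txt']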
BugFix: Logging errors caused by wrong parameters for LOGGER.debug -- relics from using print()
@@ -99,7 +99,7 @@ class _TargetCollector(object): # mark target names not in existing_anchors as UNDEFINED if key not in self.existing_anchors: item.state = TARGET_STATE.UNDEFINED - LOGGER.debug(key, TARGET_STATE.name(item.state)) + LOGGER.debug('%s %s', key, TARGET_STATE.name(item.state)) LOGGER.debug('------- existing anchors -------------') LOGGER.debug(self.existing_anchors) @@ -113,7 +113,7 @@ class _TargetCollector(object): anchor_name, TargetLookupItem(TARGET_STATE.UNDEFINED)) LOGGER.debug( - 'lookup_target', anchor_name, TARGET_STATE.name(item.state)) + 'lookup_target %s %s', anchor_name, TARGET_STATE.name(item.state)) if item.state == TARGET_STATE.PENDING: if anchor_name not in self.existing_anchors: item.state = TARGET_STATE.UNDEFINED @@ -140,9 +140,9 @@ class _TargetCollector(object): item = self.items.get(anchor_name, None) if item: LOGGER.debug( - 'store_target?', anchor_name, TARGET_STATE.name(item.state)) + 'store_target? %s %s', anchor_name, TARGET_STATE.name(item.state)) if item.state == TARGET_STATE.PENDING: - LOGGER.debug(' -> update:', target_counter_values) + LOGGER.debug(' -> update: %s', target_counter_values) # need A REAL DUPLICATE UNCONNECTED SEPARATE COPY!! item.state = TARGET_STATE.UPTODATE item.target_counter_values = copy.deepcopy(
DOC: Minor grammar fix. I don't expect this to be controversial.
@@ -267,15 +267,14 @@ def least_squares( element (i, j) is the partial derivative of f[i] with respect to x[j]). The keywords select a finite difference scheme for numerical estimation. The scheme '3-point' is more accurate, but requires - twice as much operations compared to '2-point' (default). The - scheme 'cs' uses complex steps, and while potentially the most - accurate, it is applicable only when `fun` correctly handles - complex inputs and can be analytically continued to the complex - plane. Method 'lm' always uses the '2-point' scheme. If callable, - it is used as ``jac(x, *args, **kwargs)`` and should return a - good approximation (or the exact value) for the Jacobian as an - array_like (np.atleast_2d is applied), a sparse matrix or a - `scipy.sparse.linalg.LinearOperator`. + twice as many operations as '2-point' (default). The scheme 'cs' + uses complex steps, and while potentially the most accurate, it is + applicable only when `fun` correctly handles complex inputs and + can be analytically continued to the complex plane. Method 'lm' + always uses the '2-point' scheme. If callable, it is used as + ``jac(x, *args, **kwargs)`` and should return a good approximation + (or the exact value) for the Jacobian as an array_like (np.atleast_2d + is applied), a sparse matrix or a `scipy.sparse.linalg.LinearOperator`. bounds : 2-tuple of array_like, optional Lower and upper bounds on independent variables. Defaults to no bounds. Each array must match the size of `x0` or be a scalar, in the latter
New Salt Lake City entry, unknown date. From:
@@ -14,3 +14,11 @@ The man on the ground was shot with a beanbag, resulting in heavy damage **Links** * https://www.reddit.com/r/nextfuckinglevel/comments/gtv4co/downtown_salt_lake_city_may_30th_2020_unarmed/ + +### Police shoot tear gas canister at man from close range, striking him in the chest | Uknown Date + +Man standing in a crowd of protestors was struck with a tear gas canister. + +**Links** + +* https://twitter.com/greg_doucette/status/1268333029526843392
Make sure we are storing IPv4Networks as strings in state db Fixes
@@ -28,12 +28,12 @@ class BaseLXDSetupController: def set_state(self, key, value): key = "{}.{}".format(self.state_key, key) - ret = app.state.set(key, value) + ret = app.state.set(key, str(value)) return ret def get_state(self, key): key = "{}.{}".format(self.state_key, key) - return app.state.get(key).decode('utf8') + return app.state.get(key) def next_screen(self): return controllers.use('controllerpicker').render()
[input] Cleaner termination logic; also, remove /tmp/bee.log. Accidentally opened the file /tmp/bee.log without writing anything to it.
@@ -14,14 +14,18 @@ RIGHT_MOUSE = 3 WHEEL_UP = 4 WHEEL_DOWN = 5 +def is_terminated(): + for thread in threading.enumerate(): + if thread.name == "MainThread" and not thread.is_alive(): + return True + return False + def read_input(inp): """Read i3bar input and execute callbacks""" epoll = select.epoll() epoll.register(sys.stdin.fileno(), select.EPOLLIN) - f = open("/tmp/bee.log", "a") while inp.running: - for thread in threading.enumerate(): - if thread.name == "MainThread" and not thread.is_alive(): + if is_terminated(): return events = epoll.poll(1)
added trailing comma make sure black stops complaining
@@ -49,7 +49,7 @@ callPackage (nur.repo-sources."%s" + "/%s") {} # TODO find commit hash prefixes = { "nixpkgs": "https://github.com/nixos/nixpkgs/tree/master/", - "nur": "https://github.com/nix-community/nur-combined/tree/master/" + "nur": "https://github.com/nix-community/nur-combined/tree/master/", } stripped = path.parts[4:] attrPath = "/".join(stripped[1:])
session: disable resource-limits for private IP sessions When hosting a server over .onion, all resource usage was accounted against the private IP of the Tor gateway (e.g. localhost). related:
@@ -17,6 +17,7 @@ import time from collections import defaultdict from functools import partial from ipaddress import IPv4Address, IPv6Address +from typing import Optional import attr import pylru @@ -765,18 +766,24 @@ class SessionManager: for session in self.sessions: await self._task_group.spawn(session.notify, touched, height_changed) - def _ip_addr_group_name(self, session): + def _ip_addr_group_name(self, session) -> Optional[str]: host = session.remote_address().host if isinstance(host, IPv4Address): - return '.'.join(str(host).split('.')[:3]) + if host.is_private: # exempt private addresses + return None + return '.'.join(str(host).split('.')[:3]) # /24 if isinstance(host, IPv6Address): - return ':'.join(host.exploded.split(':')[:3]) + if host.is_private: + return None + return ':'.join(host.exploded.split(':')[:3]) # /48 return 'unknown_addr' def _timeslice_name(self, session): return f't{int(session.start_time - self.start_time) // 300}' - def _session_group(self, name, weight): + def _session_group(self, name: Optional[str], weight: float) -> Optional[SessionGroup]: + if name is None: + return None group = self.session_groups.get(name) if not group: group = SessionGroup(name, weight, set(), 0) @@ -790,6 +797,7 @@ class SessionManager: self._session_group(self._timeslice_name(session), 0.03), self._session_group(self._ip_addr_group_name(session), 1.0), ) + groups = (group for group in groups if group is not None) self.sessions[session] = groups for group in groups: group.sessions.add(session)
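A rough standalone sketch of the grouping idea described above; the function name and example addresses are mine, not the project's, while the /24 and /48 prefix sizes mirror the diff:

```python
import ipaddress

def addr_group_name(host_str):
    host = ipaddress.ip_address(host_str)
    if host.is_private:
        # Exempt private addresses (e.g. a local Tor gateway), so that all
        # .onion traffic is not accounted against one localhost group.
        return None
    if host.version == 4:
        return '.'.join(str(host).split('.')[:3])        # /24 bucket
    return ':'.join(host.exploded.split(':')[:3])        # /48 bucket

print(addr_group_name('127.0.0.1'))             # None -> no shared limits
print(addr_group_name('8.8.8.8'))               # '8.8.8'
print(addr_group_name('2001:4860:4860::8888'))  # '2001:4860:4860'
```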
Update noaa-rtma.yaml Update contact
@@ -5,7 +5,8 @@ Description: | Data is available from the start of 2019 until present. Documentation: https://www.nco.ncep.noaa.gov/pmb/products/rtma/ Contact: | - For any questions regarding data delivery not associated with this platform or any general questions regarding the NOAA Big Data Program, email [email protected]. + For any questions regarding data delivery or any general questions regarding the NOAA Open Data Dissemination (NODD) Program, email the NODD Team at [email protected]. + <br /> We also seek to identify case studies on how NOAA data is being used and will be featuring those stories in joint publications and in upcoming events. If you are interested in seeing your story highlighted, please share it with the NODD team by emailing [email protected] ManagedBy: "[NOAA](http://www.noaa.gov/)" UpdateFrequency: Hourly Collabs:
typeahead_helper: Add test coverage for highlighting. Specifically, the test_highlight_with_escaping, which is used in most of our typeaheads.
@@ -7,6 +7,7 @@ add_dependencies({ people: 'js/people.js', typeahead_helper: 'js/typeahead_helper.js', util: 'js/util.js', + Handlebars: 'handlebars', }); var popular = {num_items: function () { @@ -138,3 +139,23 @@ _.each(matches, function (person) { ]); }()); + +(function test_highlight_with_escaping() { + var item = "Denmark"; + var query = "Den"; + var expected = "<strong>Den</strong>mark"; + var result = th.highlight_with_escaping(query, item); + assert.equal(result, expected); + + item = "w3IrD_naMe"; + query = "w3IrD_naMe"; + expected = "<strong>w3IrD_naMe</strong>"; + result = th.highlight_with_escaping(query, item); + assert.equal(result, expected); + + item = "development help"; + query = "development h"; + expected = "<strong>development h</strong>elp"; + result = th.highlight_with_escaping(query, item); + assert.equal(result, expected); +}());
Update finders.py Fix for PYPI formed paths
@@ -141,6 +141,9 @@ class PathFinder(BaseFinder): for path in glob('{0}/lib/python*/site-packages'.format(self.virtual_env)): if path not in self.paths: self.paths.append(path) + for path in glob('{0}/lib/python*/*/site-packages'.format(self.virtual_env)): + if path not in self.paths: + self.paths.append(path) for path in glob('{0}/src/*'.format(self.virtual_env)): if os.path.isdir(path): self.paths.append(path)
Avoid logging to both terminal and GUI The problem was that GUI logging automatically pushes unformatted log messages to the terminal, thus all messages appear twice.
@@ -121,14 +121,14 @@ def log(message, level="INFO", origin=None, prefix=""): # log to terminal or Blender if prefs.logtoterminal: print(terminalmsg) - + else: # log in GUI depending on loglevel import sys # start from this function frame = sys._getframe(1) f_name = frame.f_code.co_name # go back until operator (using execute) - while f_name != 'execute' and frame != None: + while f_name != 'execute' and frame is not None: frame = frame.f_back f_name = frame.f_code.co_name
Update rmsrat.txt Update for Reference section.
# See the file 'LICENSE' for copying permission # Reference: https://twitter.com/James_inthe_box/status/1118968911590907904 +# Reference: https://twitter.com/James_inthe_box/status/1121513004627927040 159.69.48.50:5655
llvm/codegen/UDF: Enable flatten on non-pointer operands Needed for tuple and list literals. Don't use alloca.
@@ -182,21 +182,20 @@ class UserDefinedFunctionVisitor(ast.NodeVisitor): shape = helpers.get_array_shape(val) return ir.ArrayType(self.ctx.float_ty, len(shape))(shape) elif node.attr == "flatten": + if helpers.is_pointer(val): + val = self.builder.load(val) def flatten(builder): - shape = helpers.get_array_shape(val) - flattened_size = reduce(lambda x, y: x * y, shape) - flattened_ty = ir.ArrayType(self.ctx.float_ty, flattened_size) - flattened_array = builder.alloca(flattened_ty) - index_var = builder.alloca(self.ctx.int32_ty, name="flattened_index_var_loc") - builder.store(self.ctx.int32_ty(0), index_var) - for (array_ptr,) in helpers.recursive_iterate_arrays(self.ctx, builder, val): - index = builder.load(index_var, name="flattened_index_var") - flattened_array_ptr = builder.gep(flattened_array, [self.ctx.int32_ty(0), index]) - array_val = builder.load(array_ptr) - builder.store(array_val, flattened_array_ptr) - index = builder.add(index, self.ctx.int32_ty(1), name="flattened_index_var_inc") - builder.store(index, index_var) - return flattened_array + res = [] + def collect(builder, x): + res.append(x) + return x + self._do_unary_op(builder, val, collect) + + assert len(res) > 0 + flat = ir.ArrayType(res[0].type, len(res))(ir.Undefined) + for i, v in enumerate(res): + flat = self.builder.insert_value(flat, v, i) + return flat return flatten elif node.attr == "astype": if helpers.is_pointer(val):
Add clarity on state space assumptions [ci skip]
@@ -41,6 +41,8 @@ def create_smooth_transition_models(initial_state, x_coords, y_coords, times, tu Notes ----- x_coords, y_coords and times must be of same length. + This method assumes a cartesian state space with velocities eg. (x, vx, y, vy). It returns + transition models for 2 cartesian coordinates and their corresponding velocities. """ state = deepcopy(initial_state) # don't alter platform state with calculations
Monkey patch PyGObject to make Gaphor boot on GTK4 Until is merged.
@@ -14,9 +14,19 @@ if os.getenv("GAPHOR_USE_GTK") != "NONE": gtk_version = "4.0" if os.getenv("GAPHOR_USE_GTK") == "4" else "3.0" gtk_source_version = "5" if os.getenv("GAPHOR_USE_GTK") == "4" else "4" + if gtk_version == "4.0": + # Monkey patch PyGObject + import gi.overrides.Gtk + + del gi.overrides.Gtk.TreeView.enable_model_drag_source + del gi.overrides.Gtk.TreeView.enable_model_drag_dest + gi.overrides.Gtk.ListStore.insert_with_valuesv = ( + gi.overrides.Gtk.ListStore.insert_with_values + ) + gi.require_version("Gtk", gtk_version) gi.require_version("Gdk", gtk_version) - if os.getenv("GAPHOR_USE_GTK") != "4": + if gtk_version == "3.0": gi.require_version("GtkSource", gtk_source_version)
chore: reference main branch of google-cloud-python Adjust google-cloud-python links to reference main branch.
@@ -12,7 +12,7 @@ processing power of Google's infrastructure. - `Product Documentation`_ .. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg :target: https://pypi.org/project/google-cloud-bigquery/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery.svg
Remove brightness packet from oldcookie smartmatrix Remove the brightness packet from the test for oldcookie smartmatrix interfaces.
@@ -60,13 +60,13 @@ class TestSmartMatrix(MpfTestCase): call(b'\xba\x11\x00\x03\x04\x00\x00\x00\x00\x01\x02\x03') # frame ]) + #test old cookie self.machine.rgb_dmds.smartmatrix_2.update([0x00, 0x01, 0x02, 0x03]) self.advance_time_and_run(.1) start = time.time() while self.serial_mocks["com5"].write.call_count < 2 and time.time() < start + 10: time.sleep(.001) self.serial_mocks["com5"].write.assert_has_calls([ - call(b'\xba\x11\x00\x03\x14\x7f\x00\x00'), # brightness call(b'\x01\x00\x01\x02\x03') # frame ])
[COMMUNITY] -> Reviewer Please join us to welcome as a new reviewer to TVM. contributed extensively across different layers of the system, including layout transform, PaddlePaddle, TFLite, and ONNX frontend. [Commits History](https://github.com/apache/tvm/commits?author=blackkker) [Code Review](https://github.com/apache/tvm/pulls?q=reviewed-by:blackkker)
@@ -204,6 +204,7 @@ We do encourage everyone to work anything they are interested in. - [Lianmin Zheng](https://github.com/merrymercy): @merrymercy - [Min Chen](https://github.com/multiverstack-intellif): @multiverstack-intellif - [Xiyou Zhou](https://github.com/zxybazh): @zxybazh +- [@blackkker](https://github.com/blackkker): @blackkker ## List of Contributors - [Full List of Contributors](https://github.com/apache/tvm/graphs/contributors)
[sqlpp11] Add the scripts in the package and update PATH * Add the scripts in the package and update PATH * Apply pep8 tool Used: black --line-length 100 * Move tool to bin dir Requested by conan hooks
@@ -24,13 +24,21 @@ class Sqlpp11Conan(ConanFile): self.info.header_only() def source(self): - tools.get(**self.conan_data["sources"][self.version], - destination=self._source_subfolder, strip_root=True) + tools.get( + **self.conan_data["sources"][self.version], + destination=self._source_subfolder, + strip_root=True + ) def package(self): self.copy("LICENSE", dst="licenses", src=self._source_subfolder) self.copy("*.h", dst="include", src=os.path.join(self._source_subfolder, "include")) + self.copy("*", dst="bin", src=os.path.join(self._source_subfolder, "scripts")) def package_info(self): self.cpp_info.filenames["cmake_find_package"] = "Sqlpp11" self.cpp_info.filenames["cmake_find_package_multi"] = "Sqlpp11" + + bindir = os.path.join(self.package_folder, "bin") + self.output.info("Appending PATH environment variable: {}".format(bindir)) + self.env_info.PATH.append(bindir)
build: Ship libbase_static.a for macOS/Linux Needed for using e.g. `switches::kEnableFeatures` in electron/electron#13784.
@@ -83,6 +83,7 @@ BINARIES = { BINARIES_SHARED_LIBRARY = { 'darwin': [ + os.path.join('obj', 'base', 'libbase_static.a'), os.path.join('obj', 'components', 'cdm', 'renderer', 'librenderer.a'), os.path.join('obj', 'net', 'libhttp_server.a'), os.path.join('obj', 'third_party', 'webrtc', 'rtc_base', 'librtc_base.a'), @@ -104,6 +105,7 @@ BINARIES_SHARED_LIBRARY = { os.path.join('obj', 'third_party', 'pdfium', 'libpwl.a'), ], 'linux': [ + os.path.join('obj', 'base', 'libbase_static.a'), os.path.join('obj', 'components', 'cdm', 'renderer', 'librenderer.a'), os.path.join('obj', 'net', 'libhttp_server.a'), os.path.join('obj', 'third_party', 'webrtc', 'rtc_base', 'librtc_base.a'),
test: Remove testing of pyomo-hack The support for freeing the pyomo data structures during pyomo's solving phase was removed from pypsa in commit #5ac1325, since it proved to be too slow for productive use.
@@ -39,7 +39,7 @@ def test_lopf(): snapshots = network.snapshots for formulation, free_memory in product(["angles", "cycles", "kirchhoff", "ptdf"], - [{}, {"pypsa"}, {"pypsa", "pyomo-hack"}]): + [{}, {"pypsa"}]): network.lopf(snapshots=snapshots,solver_name=solver_name,formulation=formulation, free_memory=free_memory) print(network.generators_t.p.loc[:,network.generators.index]) print(network_r.generators_t.p.loc[:,network.generators.index])
SceneAlgo : Use `parallelProcessLocations()` in `parallelTraverse()` The two have basically identical implementations, save for the behaviour regarding copying of the functor. Ideally we will rationalise this further in the future so that `parallelProcessLocations()` can be used directly for everything.
@@ -44,80 +44,6 @@ namespace GafferScene namespace Detail { -template <class ThreadableFunctor> -class TraverseTask : public tbb::task -{ - - public : - - TraverseTask( - const GafferScene::ScenePlug *scene, - const Gaffer::ThreadState &threadState, - ThreadableFunctor &f - ) - : m_scene( scene ), m_threadState( threadState ), m_f( f ) - { - } - - TraverseTask( - const GafferScene::ScenePlug *scene, - const Gaffer::ThreadState &threadState, - const ScenePlug::ScenePath &path, - ThreadableFunctor &f - ) - : m_scene( scene ), m_threadState( threadState ), m_f( f ), m_path( path ) - { - } - - - ~TraverseTask() override - { - } - - task *execute() override - { - ScenePlug::PathScope pathScope( m_threadState, &m_path ); - - if( m_f( m_scene, m_path ) ) - { - IECore::ConstInternedStringVectorDataPtr childNamesData = m_scene->childNamesPlug()->getValue(); - const std::vector<IECore::InternedString> &childNames = childNamesData->readable(); - - set_ref_count( 1 + childNames.size() ); - - ScenePlug::ScenePath childPath = m_path; - childPath.push_back( IECore::InternedString() ); // space for the child name - for( std::vector<IECore::InternedString>::const_iterator it = childNames.begin(), eIt = childNames.end(); it != eIt; it++ ) - { - childPath[m_path.size()] = *it; - TraverseTask *t = new( allocate_child() ) TraverseTask( *this, childPath ); - spawn( *t ); - } - wait_for_all(); - } - - return nullptr; - } - - protected : - - TraverseTask( const TraverseTask &other, const ScenePlug::ScenePath &path ) - : m_scene( other.m_scene ), - m_threadState( other.m_threadState ), - m_f( other.m_f ), - m_path( path ) - { - } - - private : - - const GafferScene::ScenePlug *m_scene; - const Gaffer::ThreadState &m_threadState; - ThreadableFunctor &m_f; - GafferScene::ScenePlug::ScenePath m_path; - -}; - template<typename ThreadableFunctor> class LocationTask : public tbb::task { @@ -251,9 +177,13 @@ void parallelProcessLocations( const GafferScene::ScenePlug *scene, ThreadableFu template <class ThreadableFunctor> void parallelTraverse( const ScenePlug *scene, ThreadableFunctor &f, const ScenePlug::ScenePath &root ) { - tbb::task_group_context taskGroupContext( tbb::task_group_context::isolated ); // Prevents outer tasks silently cancelling our tasks - Detail::TraverseTask<ThreadableFunctor> *task = new( tbb::task::allocate_root( taskGroupContext ) ) Detail::TraverseTask<ThreadableFunctor>( scene, Gaffer::ThreadState::current(), root, f ); - tbb::task::spawn_root_and_wait( *task ); + // `parallelProcessLocations()` takes a copy of the functor at each location, whereas + // `parallelTraverse()` is intended to use the same functor for all locations. Wrap the + // functor in a cheap-to-copy lambda, so that the functor itself won't be copied. + auto reference = [&f] ( const ScenePlug *scene, const ScenePlug::ScenePath &path ) { + return f( scene, path ); + }; + parallelProcessLocations( scene, reference, root ); } template <class ThreadableFunctor>
Magic number increased to 120 The original size of 100 was insufficient to cover the driver version when the driver is "MongoDB Internal Driver"
@@ -48,6 +48,7 @@ class ClientSection(BaseSection): pos = line.find('client metadata') if pos != -1: + #MongoDB Internal Driver was pushing version number outside index, increased from 100 to 120 to accommodate tokens = line[pos:pos + 120].split(' ') ip, _ = tokens[3].split(':') ip_formatted = str(ip)
Test Rust SDK without default features enabled Runs unit tests both with and without default features enabled, to ensure that the SDK works in both scenarios.
@@ -29,4 +29,5 @@ services: volumes: - $SAWTOOTH_CORE:/project/sawtooth-core working_dir: /project/sawtooth-core/sdk/rust - command: cargo test + # Test that the SDK compiles with default features both enabled and disabled + command: bash -c "cargo test && cargo test --no-default-features"
rm backfill_vaccination_initiated This PR addresses the fact that `backfill_vaccination_initiated` is no longer called.
@@ -64,39 +64,6 @@ def derive_vaccine_pct(ds_in: MultiRegionDataset) -> MultiRegionDataset: return ds_in.replace_timeseries_wide_dates([ts_in_without_pcts, most_recent_pcts]) -def backfill_vaccination_initiated(dataset: MultiRegionDataset) -> MultiRegionDataset: - """Backfills vaccination initiated data from total doses administered and total completed. - - Args: - dataset: Input dataset. - - Returns: New dataset with backfilled data. - """ - administered = dataset.get_timeseries_bucketed_wide_dates(CommonFields.VACCINES_ADMINISTERED) - completed = dataset.get_timeseries_bucketed_wide_dates(CommonFields.VACCINATIONS_COMPLETED) - existing_initiated = dataset.get_timeseries_bucketed_wide_dates( - CommonFields.VACCINATIONS_INITIATED - ) - - # Compute and keep only time series with at least one real value - computed_initiated = administered - completed - computed_initiated = computed_initiated.dropna(axis=0, how="all") - # Keep the computed initiated only where there is not already an existing time series. - computed_initiated = computed_initiated.loc[ - ~computed_initiated.index.isin(existing_initiated.index) - ] - - # Use concat to prepend the VARIABLE index level, then reorder the levels to match the dataset. - computed_initiated = pd.concat( - {CommonFields.VACCINATIONS_INITIATED: computed_initiated}, - names=[PdFields.VARIABLE] + list(computed_initiated.index.names), - ).reorder_levels(timeseries.EMPTY_TIMESERIES_BUCKETED_WIDE_DATES_DF.index.names) - - return dataset.replace_timeseries_wide_dates( - [dataset.timeseries_bucketed_wide_dates, computed_initiated] - ).add_tag_to_subset(taglib.Derived("backfill_vaccination_initiated"), computed_initiated.index) - - STATE_LOCATION_ID = "state_location_id"
Update to support PySide2 QSignal definition and qt_min_version implementation are now independent of PyQt5
@@ -29,7 +29,7 @@ from pyqtgraph.Qt import QtGui, QtCore, loadUiType log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) -QtCore.QSignal = QtCore.pyqtSignal +QtCore.QSignal = QtCore.Signal def fromUi(*args, **kwargs): @@ -52,9 +52,11 @@ def fromUi(*args, **kwargs): def qt_min_version(major, minor=0): """ Check for a minimum Qt version. For example, to check for version 4.11 - or later, call ``check_qt_version(4, 11)``. + or later, call ``qt_min_version(4, 11)``. - :return bool: True if PyQt version >= min_version + :return bool: True if Python Qt bindings library version >= min_version """ - return (QtCore.QT_VERSION >= ((major << 16) + (minor << 8))) - + version = QtCore.qVersion().split(".") + version_major = int(version[0]) + version_minor = int(version[1]) + return (version_major > major) or ((version_major == major) and (version_minor >= minor))
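A hedged, framework-agnostic sketch of the same check — comparing integer tuples rather than the PyQt-only QT_VERSION constant or raw strings; the version strings below are examples, not actual Qt queries:

```python
def min_version(version_string, major, minor=0):
    parts = version_string.split(".")
    current = (int(parts[0]), int(parts[1]))
    # Tuple comparison handles multi-digit components correctly.
    return current >= (major, minor)

print(min_version("5.15.2", 5, 9))   # True
print(min_version("5.9.7", 5, 15))   # False (a plain string compare would say True)
```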
Update phishing.txt Explicit phishing from ```nowddns.com``` dynamic domain.
@@ -7942,3 +7942,73 @@ oni.gov.ge # Reference: https://twitter.com/soccia555/status/1133435220814966784 pcbever.be + +# Reference: https://www.virustotal.com/gui/domain/nowddns.com/relations + +apple-intl.nowddns.com +apple-login.nowddns.com +apple-manage-information.nowddns.com +apple-order-manage.nowddns.com +apple-orders-manage.nowddns.com +apple-service.nowddns.com +appleid-apple-manage.nowddns.com +appleid-apple-order.nowddns.com +appleid-apple-orders.nowddns.com +appleid-manage-info.nowddns.com +appleid-orders-apple.nowddns.com +appleid-orders-manage.nowddns.com +appleid.nowddns.com +appleid1-intl.nowddns.com +appleorders-manage.nowddns.com +applesupportcenter.nowddns.com +check-securedorder.nowddns.com +confirmation-data-verify.nowddns.com +confirmation-paypalserv.nowddns.com +confirmation-servpaypal.nowddns.com +confirmation-your-account.nowddns.com +confirmedordernerenow.nowddns.com +cpanel.apple-login.nowddns.com +cpanel.paypal-login.nowddns.com +dailydevops-gitlab.nowddns.com +dailydevops.nowddns.com +facesbook.nowddns.com +information-babykq2324.nowddns.com +login-paypal.nowddns.com +login-sector1app.nowddns.com +mail.paypal-login.nowddns.com +manage-accountinfo-id.nowddns.com +manage-information-serv.nowddns.com +manage-information-suppp.nowddns.com +manage-orders-appleid.nowddns.com +manage-ordersecured.nowddns.com +manageorder-secured.nowddns.com +members-contacts.nowddns.com +membership-information-id.nowddns.com +order-appleid-apple.nowddns.com +orders-appleid-apple.nowddns.com +paypal-intl.nowddns.com +paypal-login.nowddns.com +paypal-securedcheck.nowddns.com +paypalaccountservice.nowddns.com +paypalsecure-pageservice.nowddns.com +purchase-assistance-mobi.nowddns.com +purchase-assistance-mor.nowddns.com +resolving-accountpurchase.nowddns.com +review-ordersecured.nowddns.com +secure-app-service.nowddns.com +secure-app-services.nowddns.com +secure-checkorderid.nowddns.com +secure-confirmation.nowddns.com +secure-myaccount.nowddns.com +secure-your-account.nowddns.com +secure-youraccount-page.nowddns.com +secure-youraccounts.nowddns.com +secure1-apple.nowddns.com +secured-manageorder.nowddns.com +secured-managepurchase.nowddns.com +secured-orderistore.nowddns.com +securedappleverify.nowddns.com +securepage-paypalserv.nowddns.com +servaccount-confirm.nowddns.com +service-data-confirm.nowddns.com +stuck-service-id68767.nowddns.com
Remove unnecessary parameter from function Parameter is not used by the function.
@@ -3680,7 +3680,7 @@ api-group-workflows/#api-rest-api-2-workflow-search-get) # Agile(Formerly Greenhopper) REST API implements # Resource: https://docs.atlassian.com/jira-software/REST/7.3.1/ ####################################################################### - def add_issues_to_backlog(self, sprint_id, issues): + def add_issues_to_backlog(self, issues): """ Adding Issue(s) to Backlog :param issues: list: List of Issue Keys
add sms sender to old script we have problems where the govuk service is not migrated to sms_senders on new databases. update this old script so we don't affect live systems, but when people rebuild their database from scratch they get the sms sender for govuk notify.
@@ -32,6 +32,13 @@ def upgrade(): unique=True) op.create_index(op.f('ix_service_sms_senders_service_id'), 'service_sms_senders', ['service_id'], unique=True) + # populate govuk seeded service + op.execute(""" + INSERT INTO service_sms_senders + (id, sms_sender, service_id, is_default, inbound_number_id, created_at, updated_at) + VALUES ('286d6176-adbe-7ea7-ba26-b7606ee5e2a4', 'GOVUK', 'd6aa2c68-a2d9-4437-ab19-3ae8eb202553', true, null, now(), null) + """) + def downgrade(): op.drop_index(op.f('ix_service_sms_senders_service_id'), table_name='service_sms_senders')
Flesh out the set of project URLs [skip ci] Include links to getlektor.com, as well as direct links to documentation and changelog.
@@ -7,7 +7,12 @@ license = BSD platforms = any author = Armin Ronacher author_email = [email protected] -url = http://github.com/lektor/lektor/ +url = https://www.getlektor.com/ +project_urls = + Homepage = https://www.getlektor.com/ + Source = https://github.com/lektor/lektor/ + Documentation = https://www.getlektor.com/docs/ + Changelog = https://github.com/lektor/lektor/blob/master/CHANGES.md classifiers = Framework :: Lektor Environment :: Web Environment
StandardLightVisualiser : Make point lights smaller They were quite a lot bigger than other lights Improvements - Viewer : Reduced size of point lights to better match other light sources.
@@ -631,7 +631,7 @@ IECoreGL::ConstRenderablePtr StandardLightVisualiser::pointRays( float radius ) { const float angle = M_PI * 2.0f * float(i)/(float)numRays; const V3f dir( 0.0, sin( angle ), -cos( angle ) ); - addRay( dir * (.5 + radius), dir * (1 + radius), vertsPerCurve->writable(), p->writable() ); + addRay( dir * ( 0.2f + radius ), dir * ( 0.5f + radius ), vertsPerCurve->writable(), p->writable() ); } IECoreGL::CurvesPrimitivePtr curves = new IECoreGL::CurvesPrimitive( IECore::CubicBasisf::linear(), false, vertsPerCurve );
Update minimal_seq2seq.py Fixed: Added encoder_type param to Seq2SeqModel constructor.
@@ -40,7 +40,7 @@ model_args = { "max_length": 15, } -model = Seq2SeqModel("bert-base-cased", "bert-base-cased", args=model_args) +model = Seq2SeqModel("bert", "bert-base-cased", "bert-base-cased", args=model_args) def count_matches(labels, preds):
[enhance] Fetch doctype's roles in report if developer mode is on or is_standard is 'No' * [enhance] Fetch doctype's role in report if developer mode is on or is_standard is 'No' * Update report.py let's not keep separate rules for developer_mode! - this is not discoverable by a new developer
@@ -49,7 +49,7 @@ class Report(Document): delete_custom_role('report', self.name) def set_doctype_roles(self): - if not self.get('roles'): + if not self.get('roles') and self.is_standard == 'No': meta = frappe.get_meta(self.ref_doctype) roles = [{'role': d.role} for d in meta.permissions if d.permlevel==0] self.set('roles', roles)
model: transformers: example: Fix accuracy assertion Fixes:
@@ -31,7 +31,8 @@ class TestExample(unittest.TestCase): stdout = subprocess.check_output([sys.executable, filepath]) lines = stdout.decode().split("\n") # Check the Accuracy - self.assertIn("Accuracy: 0.0", lines) + if not list(filter(lambda line: line.startswith("Accuracy: "), lines)): + raise AssertionError(f"Accuracy not found in: {lines}") # Check the predicted tag for line in lines: try:
fix: Fix deprecation warning for using ET.getiterator * Using the xml element tree getiterator function has been deprecated for a while, it seems. To get rid of the warning I updated to using iter() wrapped in list(), which is all the getiterator function did.
@@ -56,7 +56,7 @@ def candlepin_broker(broker): if content: root = ET.fromstring('\n'.join(content)) # remove namespace before save to avoid urgly search - for node in root.getiterator(): + for node in list(root.iter()): prefix, has_namespace, postfix = node.tag.rpartition('}') if has_namespace: node.tag = postfix
Added an exception with an error message in find_lib() that gets raised if no files are found.
@@ -38,7 +38,7 @@ def find_lib(name, paths=[], dirHints=[]): if name in files: return os.path.join(root, name) - + raise Exception('Could not find file named "%s". Searched recursively in %s.' % (name, str(searchPaths))) class CLibrary:
Update average_median.py added doctest, fixed the "TypeError: list indices must be integers or slices, not float" error caused by number/2 producing a float as the index.
+def median(nums): """ Find median of a list of numbers. -Read more about medians: - https://en.wikipedia.org/wiki/Median -""" + >>> median([0]) + 0 + >>> median([4,1,3,2]) + 2.5 + Args: + nums: List of nums -def median(nums): - """Find median of a list of numbers.""" - # Sort list + Returns: + Median. + """ sorted_list = sorted(nums) - print("List of numbers:") - print(sorted_list) - - # Is number of items in list even? + med = None if len(sorted_list) % 2 == 0: - # Find index for first middle value. - mid_index_1 = len(sorted_list) / 2 - # Find index for second middle value. - mid_index_2 = -(len(sorted_list) / 2) - 1 - # Divide middle values by 2 to get average (mean). + mid_index_1 = len(sorted_list) // 2 + mid_index_2 = (len(sorted_list) // 2) - 1 med = (sorted_list[mid_index_1] + sorted_list[mid_index_2]) / float(2) - return med # Return makes `else:` unnecessary. - # Number of items is odd. - mid_index = (len(sorted_list) - 1) / 2 - # Middle index is median. + else: + mid_index = (len(sorted_list) - 1) // 2 med = sorted_list[mid_index] return med - def main(): - """Call average module to find median of a specific list of numbers.""" print("Odd number of numbers:") print(median([2, 4, 6, 8, 20, 50, 70])) print("Even number of numbers:") print(median([2, 4, 6, 8, 20, 50])) - if __name__ == '__main__': main()
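A minimal, self-contained illustration (not from the commit) of the TypeError mentioned above — in Python 3, `/` always returns a float, which cannot be used as a list index, while `//` returns an int:

```python
nums = sorted([4, 1, 3, 2])      # [1, 2, 3, 4]

mid = len(nums) / 2              # 2.0 -- a float, even though it divides evenly
try:
    nums[mid]
except TypeError as exc:
    print(exc)                   # list indices must be integers or slices, not float

# Floor division gives an int, so the even-length median works:
print((nums[len(nums) // 2] + nums[len(nums) // 2 - 1]) / 2)   # 2.5
```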
Group libraries in TOC and add PyTorch Elastic Summary: Move XLA out of Notes and group with other libraries. Also adds link to PyTorch Elastic ![image](https://user-images.githubusercontent.com/8042156/76912125-f76d1080-686f-11ea-99d5-bb7be199adbd.png) Pull Request resolved:
@@ -16,7 +16,6 @@ PyTorch is an optimized tensor library for deep learning using GPUs and CPUs. :caption: Notes notes/* - PyTorch on XLA Devices <http://pytorch.org/xla/> .. toctree:: :maxdepth: 1 @@ -62,24 +61,15 @@ PyTorch is an optimized tensor library for deep learning using GPUs and CPUs. name_inference torch.__config__ <__config__> -.. toctree:: - :glob: - :maxdepth: 2 - :caption: torchvision Reference - - torchvision/index - .. toctree:: :maxdepth: 1 - :caption: torchaudio Reference + :caption: Libraries + PyTorch on XLA Devices <http://pytorch.org/xla/> + PyTorch Elastic (torchelastic) <https://pytorch.org/elastic/> torchaudio <https://pytorch.org/audio> - -.. toctree:: - :maxdepth: 1 - :caption: torchtext Reference - torchtext <https://pytorch.org/text> + torchvision/index .. toctree:: :glob:
Fixed problem where the scratchpad dropdown would not obey position and geometry.
@@ -184,6 +184,7 @@ class DropDownToggler(WindowVisibilityToggler): its floating x, y, width and height is set. """ if (not self.visible) or (not self.shown): + # SET GEOMETRY win = self.window screen = win.qtile.current_screen # calculate windows floating position and width/height @@ -194,8 +195,9 @@ class DropDownToggler(WindowVisibilityToggler): win.float_y = win.y win.width = int(screen.dwidth * self.width) win.height = int(screen.dheight * self.height) - - # SHOW + # Configure the new geometry + win._reconfigure_floating() + # Toggle the dropdown WindowVisibilityToggler.show(self)
Clean up the code for adding charm Some of the if statements were slightly hard to read because of the if/elif structure.
@@ -576,14 +576,14 @@ class AddCharmChange(ChangeInfo): context.origins[self.charm] = {str(None): origin} return self.charm - elif Schema.CHARM_STORE.matches(url.schema): + if Schema.CHARM_STORE.matches(url.schema): entity_id = await context.charmstore.entityId(self.charm) log.debug('Adding %s', entity_id) await context.client_facade.AddCharm(channel=None, url=entity_id, force=False) identifier = entity_id origin = client.CharmOrigin(source="charm-store", risk="stable") - elif Schema.CHARM_HUB.matches(url.schema): + if Schema.CHARM_HUB.matches(url.schema): ch = Channel('latest', 'stable') if self.channel: ch = Channel.parse(self.channel).normalize()
Make the egg build reproducible bit-for-bit Hard coding the timestamp Removing directory entries
@@ -19,7 +19,8 @@ rm -rf insights/parsers/* cp ../insights/parsers/__init__.py insights/parsers find insights -name '*.pyc' -delete -zip ../insights.egg -r EGG-INFO/ insights/ +find . -type f -exec touch -c -t 201801010000.00 {} \; +zip --no-dir-entries ../insights.zip -r EGG-INFO/ insights/ cd .. rm -rf tmp git checkout MANIFEST.in
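A speculative Python counterpart to the shell commands above, using the zipfile module with a fixed timestamp and no directory entries; the archive and directory names are placeholders:

```python
import os
import zipfile

FIXED_DATE = (2018, 1, 1, 0, 0, 0)          # mirrors `touch -t 201801010000.00`

with zipfile.ZipFile("insights.zip", "w") as zf:
    for root, dirs, files in os.walk("insights"):
        dirs.sort()                          # deterministic traversal order
        for name in sorted(files):           # file entries only, no directories
            path = os.path.join(root, name)
            info = zipfile.ZipInfo(path, date_time=FIXED_DATE)
            info.external_attr = 0o644 << 16     # constant permission bits
            with open(path, "rb") as fh:
                zf.writestr(info, fh.read(), zipfile.ZIP_DEFLATED)
```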
Add /opt/homebrew to where spatialite extension can be found Helps homebrew on Apple Silicon setups find spatialite without needing a full path. Similar to Thanks,
@@ -51,6 +51,7 @@ SPATIALITE_PATHS = ( "/usr/lib/x86_64-linux-gnu/mod_spatialite.so", "/usr/local/lib/mod_spatialite.dylib", "/usr/local/lib/mod_spatialite.so", + "/opt/homebrew/lib/mod_spatialite.dylib", ) # Used to display /-/versions.json SpatiaLite information SPATIALITE_FUNCTIONS = (
Add Mumbai and Canada-Central as new regions to support CodeCommit in cr
@@ -102,6 +102,7 @@ def list_branches(repo_name, next_token=None): def region_supported(region): supported_regions = [ + "ca-central-1", # Canada (Central) "us-east-1", # US East (N. Virginia) "us-east-2", # US East (Ohio) "us-west-1", # US West (N. California) @@ -113,6 +114,7 @@ def region_supported(region): "ap-northeast-2", # Asia Pacific (Seoul) "ap-southeast-1", # Asia Pacific (Singapore) "ap-southeast-2", # Asia Pacific (Sydney) + "ap-south-1", # Asia Pacific (Mumbai) "sa-east-1" # South America (Sao Paulo) ] if region is not None and region in supported_regions:
Fix links in module packaging Fix dead links
@@ -30,8 +30,8 @@ skills: - name: hello ``` -Details on pointing opsdroid to extension modules can be found in the [configuration reference](configuration). -For more on creating skills, see the [next section](skills/index) of these docs. +Details on pointing opsdroid to extension modules can be found in the [configuration reference](configuration.md). +For more on creating skills, see the [next section](skills/index.md) of these docs. ## Packaged Python extensions
fix the static nested sampler to be able to work with plateaus of log(l)
@@ -685,6 +685,9 @@ class Sampler: delta_logz = np.logaddexp(0, np.max(self.live_logl) + logvol - logz) + plateau_mode = False + plateau_counter = 0 + nplateau = 0 stop_iterations = False # The main nested sampling loop. for it in range(sys.maxsize): @@ -726,8 +729,8 @@ class Sampler: stop_iterations = True if self.live_logl.ptp() == 0: warnings.warn( - 'We have reached the plateau in the likelihood we are stopping sampling' - ) + 'We have reached the plateau in the likelihood we are' + ' stopping sampling') stop_iterations = True if stop_iterations: @@ -742,9 +745,6 @@ class Sampler: self.saved_run.append(add_info) break - # Expected ln(volume) shrinkage. - logvol -= self.dlv - # After `update_interval` interations have passed *and* we meet # the criteria for moving beyond sampling from the unit cube, # update the bound using the current set of live points. @@ -757,11 +757,23 @@ class Sampler: self.nbound += 1 self.since_update = 0 - # Locate the "live" point with the lowest `logl`. worst = np.argmin(self.live_logl) # index + # Locate the "live" point with the lowest `logl`. worst_it = self.live_it[worst] # when point was proposed boundidx = self.live_bound[worst] # associated bound index + if not plateau_mode: + nplateau = (self.live_logl == self.live_logl[worst]).sum() + if nplateau > 1: + plateau_mode = True + plateau_counter = nplateau + plateau_dlogvol = np.log(1. / (self.nlive + 1)) + logvol + # Expected ln(volume) shrinkage. + if not plateau_mode: + logvol -= self.dlv + else: + logvol = logvol + np.log1p(-np.exp(plateau_dlogvol - logvol)) + # Set our new worst likelihood constraint. # Notice we are doing copies here because live_u and live_v # are updated in-place @@ -827,6 +839,10 @@ class Sampler: # Increment total number of iterations. self.it += 1 + if plateau_mode: + plateau_counter -= 1 + if plateau_counter == 0: + plateau_mode = False # Return dead point and ancillary quantities. yield IteratorResult(worst=worst, ustar=ustar,
Make feature boxplots by subgroups more flexible. Change the limit above which no boxplots are shown to 150 and make it easier to see the message when boxplots are omitted by adding a heading. Add a new message about using thumbnails for features > 30 but <= 150, if not specified.
"metadata": {}, "outputs": [], "source": [ - "if len(features_used) > 30:\n", - " display(Markdown('Since the data has more than 30 features, boxplots with feature values for all groups '\n", - " 'will be skipped. This experiment currently has {} features.'.format(len(features_used))))\n", - "else:\n", + "num_features = len(features_used)\n", "\n", + "if (num_features > 150):\n", + " display(Markdown('### Feature values by subgroup(s)'))\n", + " display(Markdown('Since the data has {} (> 150) features, boxplots with feature values for all groups '\n", + " 'will be skipped. This experiment currently has {} features.'.format(num_features)))\n", + "elif (30 < num_features <= 150 and not use_thumbnails):\n", + " display(Markdown('### Feature values by subgroup(s)'))\n", + " display(Markdown('Since the data has {} (> 30 but <= 150) features, you need to set `\"use_thumbnails\"` to `true` in your '\n", + " 'configuration file to generate boxplots with feature values for all groups.'.format(num_features)))\n", + "else:\n", " for group in groups_desc:\n", " display(Markdown('### Feature values by {}'.format(group)))\n", " display(Markdown('In all plots in this subsection the values are reported before '\n", " df_train_combined = pd.concat([df_train_feats, df_train_feats_all], sort=True)\n", " df_train_combined.reset_index(drop=True, inplace=True)\n", "\n", - " # decide on the the height per plot\n", - " num_features = len(features_used)\n", - "\n", " # Define the order of the boxes: put 'All data' first and 'No info' last.\n", " group_levels = sorted(list(df_train_feats[group].unique()))\n", " if 'No info' in group_levels:\n",
Update parallel-coordinates-plot.md Add example with unselected
@@ -5,12 +5,12 @@ jupyter: text_representation: extension: .md format_name: markdown - format_version: '1.1' - jupytext_version: 1.1.1 + format_version: '1.3' + jupytext_version: 1.13.7 kernel_info: name: python2 kernelspec: - display_name: Python 3 + display_name: Python 3 (ipykernel) language: python name: python3 language_info: @@ -22,7 +22,7 @@ jupyter: name: python nbconvert_exporter: python pygments_lexer: ipython3 - version: 3.7.3 + version: 3.9.0 plotly: description: How to make parallel coordinates plots in Python with Plotly. display_as: scientific @@ -171,6 +171,39 @@ fig = go.Figure(data= fig.show() ``` +### Unselected Line Color and Opacity + + +*New in 5.10* + +The color and opacity of unselected lines can be set with `unselected`. Here, we set the color to `lightgray` and the opacity to `0.5`. + +```python +import plotly.graph_objects as go + +fig = go.Figure(data= + go.Parcoords( + line_color='blue', + dimensions = list([ + dict(range = [1,5], + constraintrange = [1,2], # change this range by dragging the pink line + label = 'A', values = [1,4]), + dict(range = [1.5,5], + tickvals = [1.5,3,4.5], + label = 'B', values = [3,1.5]), + dict(range = [1,5], + tickvals = [1,2,4,5], + label = 'C', values = [2,4], + ticktext = ['text 1', 'text 2', 'text 3', 'text 4']), + dict(range = [1,5], + label = 'D', values = [4,2]) + ]), + unselected = dict(line = dict(color = 'lightgray', opacity = 0.5)) + ) +) +fig.show() +``` + #### Reference See [function reference for `px.(parallel_coordinates)`](https://plotly.com/python-api-reference/generated/plotly.express.parallel_coordinates) or https://plotly.com/python/reference/parcoords/ for more information and chart attribute options!
issue ansible: avoid touching setrlimit() on target. This replaces the previous method for coping with poor Popen() performance, instead entirely monkey-patching the problem function rather than simply working around it.
@@ -43,7 +43,6 @@ import operator import os import pwd import re -import resource import signal import stat import subprocess @@ -91,15 +90,41 @@ _fork_parent = None good_temp_dir = None -# issue #362: subprocess.Popen(close_fds=True) aka. AnsibleModule.run_command() -# loops the entire SC_OPEN_MAX space. CentOS>5 ships with 1,048,576 FDs by -# default, resulting in huge (>500ms) runtime waste running many commands. -# Therefore if we are a child, cap the range to something reasonable. -rlimit = resource.getrlimit(resource.RLIMIT_NOFILE) -if (rlimit[0] > 4096 or rlimit[1] > 4096) and not mitogen.is_master: - resource.setrlimit(resource.RLIMIT_NOFILE, (4096, 4096)) - subprocess.MAXFD = 4096 # Python <3.x -del rlimit +def subprocess__Popen__close_fds(self, but): + """ + issue #362, #435: subprocess.Popen(close_fds=True) aka. + AnsibleModule.run_command() loops the entire FD space on Python<3.2. + CentOS>5 ships with 1,048,576 FDs by default, resulting in huge (>500ms) + latency starting children. Therefore replace Popen._close_fds on Linux with + a version that is O(fds) rather than O(_SC_OPEN_MAX). + """ + try: + names = os.listdir('/proc/self/fd') + except OSError: + # May fail if acting on a container that does not have /proc mounted. + self._original_close_fds(but) + return + + for name in names: + if not name.isdigit(): + continue + + fd = int(name, 10) + if fd > 2 and fd != but: + try: + os.close(fd) + except OSError: + pass + + +if ( + sys.platform.startswith('linux') and + sys.version < '3.0' and + hasattr(subprocess.Popen, '_close_fds') and + not mitogen.is_master +): + subprocess.Popen._original_close_fds = subprocess.Popen._close_fds + subprocess.Popen._close_fds = subprocess__Popen__close_fds def get_small_file(context, path):
Update dorkbot.txt Merging from + Aliases field is created.
# Copyright (c) 2014-2019 Maltrail developers (https://github.com/stamparm/maltrail/) # See the file 'LICENSE' for copying permission +# Aliases: dorkbot, ngrbot + # Reference: http://www.microsoft.com/security/portal/threat/encyclopedia/Entry.aspx?Name=Win32/Dorkbot#tab=2 av.shannen.cc @@ -308,3 +310,11 @@ appupdate02.info 0days.me 0dayx.com a7aneek.net + +# Reference: https://github.com/advanced-threat-research/IOCs/blob/master/2012/2012-09-20-ngrbot-spreads-via-chat/ngrbot-spreads-via-chat.csv + +http.xxxx.zaberno.com + +# Generic trails + +/0xabad1dea.php
Sublime Settings: Fix mixed up lists and sets Don't mix up set/list usage. Decide on one or the other. As `completions` is accessed by index on line 636 >> or completions and isinstance(completions[0][1], str) we must not use set() to create completions of themes/color schemes.
@@ -743,7 +743,7 @@ class KnownSettings(object): - contents (string): the path to commit to the settings """ hidden = _settings().get('settings.exclude_color_scheme_patterns') or [] - completions = set() + completions = [] for scheme_path in sublime.find_resources("*.tmTheme"): if any(hide in scheme_path for hide in hidden): continue @@ -752,7 +752,7 @@ class KnownSettings(object): "{} \tPackage: {}".format(file_name, package), scheme_path ) - completions.add(item) + completions.append(item) return completions @staticmethod @@ -773,7 +773,7 @@ class KnownSettings(object): - contents (string): the file name to commit to the settings """ hidden = _settings().get('settings.exclude_theme_patterns') or [] - completions = set() + completions = [] for theme in sublime.find_resources("*.sublime-theme"): theme = os.path.basename(theme) if any(hide in theme for hide in hidden): @@ -782,5 +782,5 @@ class KnownSettings(object): "{} \ttheme".format(theme), theme ) - completions.add(item) + completions.append(item) return sorted_completions(completions)
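A tiny illustration, with made-up completion values, of why a list is required here — sets are unordered and unsubscriptable, so an expression like `completions[0][1]` only works on a list:

```python
item = ("Monokai \tPackage: Color Scheme - Default", "Monokai.tmTheme")

as_list = [item]
print(as_list[0][1])          # 'Monokai.tmTheme'

as_set = {item}
try:
    as_set[0]
except TypeError as exc:
    print(exc)                # 'set' object is not subscriptable
```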
Force utf-8 decoding when querying metabase Some discord usernames contain unicode characters, which causes a decoding error, as chardet isn't 100% accurate and can falsely detect the response text as
@@ -115,12 +115,12 @@ class Metabase(Cog): try: async with self.bot.http_session.post(url, headers=self.headers, raise_for_status=True) as resp: if extension == "csv": - out = await resp.text() + out = await resp.text(encoding="utf-8") # Save the output for use with int e self.exports[question_id] = list(csv.DictReader(StringIO(out))) elif extension == "json": - out = await resp.json() + out = await resp.json(encoding="utf-8") # Save the output for use with int e self.exports[question_id] = out
Update design.md
@@ -13,15 +13,14 @@ The goal of this exercise is to introduce the student to the concept of classes. - Know how to create a class. - Know how to create objects. - Understand that instantiating a class creates an object. -- Know that `__init__()` is what is known as a 'constructor'. -- Know that `__init__()` is called upon object creation. -- Know that `__init__()` is used to initialize the object upon instantiation. +- Know that `__init__()` is a 'constructor' and is used to initialize the object upon instantiation. +- Know that `__init__()` is called upon instantiation/object creation. - Know what a method is and how it differs from a function. - Know how to create a method. - Implement instance variables. - Implement class variables. - Understand the difference between instance and class variables. -- Use `pass` as a placeholder for class methods +- Use `pass` as a placeholder for class methods. ## Out of scope