message (string, lengths 13 to 484) | diff (string, lengths 38 to 4.63k) |
---|---|
increased version to 0.2
enabled Python 3.5 | @@ -3,10 +3,11 @@ from setuptools import setup, find_packages
setup(name='pyroSAR',
packages=find_packages(),
include_package_data=True,
- version='0.1',
+ version='0.2',
description='a framework for large-scale SAR satellite data processing',
classifiers=[
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3.5',
],
install_requires=['progressbar2',
'pathos>=0.2',
|
undo react-snap workaround
we just have to make sure to serve 200.html to authenticated users | @@ -56,10 +56,6 @@ export default function AppRoute({
}
});
- //react-snap hydration workaround
- const [, setRerender] = useState(false);
- useEffect(() => setRerender(true), []);
-
const classes = useAppRouteStyles();
return isPrivate ? (
|
Temporarily disable Python 3.12 tests
pathlib implementation has changed, fake pathlib
has to be adapted first
(see | @@ -10,7 +10,8 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, macOS-latest, windows-latest]
- python-version: [3.7, 3.8, 3.9, "3.10", "3.11", "3.12-dev"]
+ python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
+# python-version: [3.7, 3.8, 3.9, "3.10", "3.11", "3.12-dev"]
include:
- python-version: "pypy-3.7"
os: ubuntu-latest
|
Ignore trailing whitespace
You can't see trailing whitespace, it has no bearing on code execution, and the warning is unnecessary. | @@ -3,7 +3,7 @@ addopts = --durations=30 --quiet
[pycodestyle]
count = True
-ignore = E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505,E741,W605,W293
+ignore = E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505,E741,W605,W293,W291
max-line-length = 120
statistics = True
exclude=pymatgen/__init__.py,docs_rst/*.py
|
Typo
Just a small typo fix | @@ -3,7 +3,7 @@ r"""
Advanced: Making Dynamic Decisions and the Bi-LSTM CRF
======================================================
-Dyanmic versus Static Deep Learning Toolkits
+Dynamic versus Static Deep Learning Toolkits
--------------------------------------------
Pytorch is a *dynamic* neural network kit. Another example of a dynamic
|
DOC: special: fix docstring of diric(x, n)
Docstring showing usage of diric(x, n) was missing the n argument. | @@ -59,7 +59,7 @@ def diric(x, n):
The Dirichlet function is defined as::
- diric(x) = sin(x * n/2) / (n * sin(x / 2)),
+ diric(x, n) = sin(x * n/2) / (n * sin(x / 2)),
where `n` is a positive integer.
|
Use the CA cert in the SSL test instead of the external cert
This is because we're now using the CA cert to sign the external cert, which makes it so that only the CA cert can be used for validation. | @@ -29,7 +29,7 @@ class TestSsl(AgentlessTestCase):
def test_ssl(self):
local_cert_path = join(self.workdir, 'cert.pem')
docl.copy_file_from_manager(
- '/etc/cloudify/ssl/cloudify_external_cert.pem', local_cert_path)
+ '/etc/cloudify/ssl/cloudify_internal_ca_cert.pem', local_cert_path)
ssl_client = create_rest_client(
rest_port='443', cert_path=local_cert_path)
|
Update integration-ArcSightESM.yml
make proxy of type boolean. | @@ -50,7 +50,7 @@ configuration:
- display: Use system proxy settings
name: proxy
defaultvalue: ""
- type: 0
+ type: 8
required: false
- display: Fetch incidents
name: isFetch
@@ -325,7 +325,7 @@ script:
Body: body
},
params.insecure,
- params.useproxy
+ params.proxy
);
if (res.StatusCode < 200 || res.StatusCode >= 300) {
|
Removed error from VCC-model
Remove an error where the PLF was added as a factor to calculate the COP of the VCC. Also added documentation. | @@ -54,7 +54,7 @@ def calc_VCC(peak_cooling_load, q_chw_load_Wh, T_chw_sup_K, T_chw_re_K, T_cw_in_
q_cw_W = 0.0
elif q_chw_load_Wh > 0.0:
- COP = calc_COP_with_carnot_efficiency(peak_cooling_load, q_chw_load_Wh, T_chw_sup_K, T_cw_in_K, VC_chiller)
+ COP = calc_COP_with_carnot_efficiency(T_chw_sup_K, T_cw_in_K, VC_chiller)
if COP < 0.0:
print(f'Negative COP: {COP} {T_chw_sup_K} {T_chw_re_K} {T_cw_in_K}, {q_chw_load_Wh}', )
# calculate chiller outputs
@@ -77,14 +77,13 @@ def calc_COP(T_cw_in_K, T_chw_re_K, q_chw_load_Wh):
return COP
-def calc_COP_with_carnot_efficiency(peak_cooling_load, q_chw_load_Wh, T_chw_sup_K, T_cw_in_K, VC_chiller):
+def calc_COP_with_carnot_efficiency(T_chw_sup_K, T_cw_in_K, VC_chiller):
"""
- Calculate the weighted average Part load factor across all chillers based on load distribution and derive the
- COP based on that (while assuming carnot efficiency).
+ Calculate vapor compression chiller COP according to [Lee, 2010].
+
+ [Lee, 2010] Tzong-Shing Lee, 2010, Second-Law Analysis to Improve the Energy Efficiency of Screw Liquid Chillers
"""
- PLF = calc_averaged_PLF(peak_cooling_load, q_chw_load_Wh, T_chw_sup_K, T_cw_in_K, VC_chiller)
- # TODO: Update in documentation where the function below was taken from
- cop_chiller = VC_chiller.g_value * T_chw_sup_K / (T_cw_in_K - T_chw_sup_K) * PLF
+ cop_chiller = VC_chiller.g_value * T_chw_sup_K / (T_cw_in_K - T_chw_sup_K)
return cop_chiller
|
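As a quick illustration of the corrected formula above (a reversed-Carnot COP scaled by the chiller's g-value, with no part-load factor applied), here is a minimal sketch; the temperatures and g_value are made-up inputs, not values from the project.

```python
# Hedged sketch of the corrected COP formula, with hypothetical inputs.
g_value = 0.35        # assumed chiller efficiency factor (g-value)
T_chw_sup_K = 279.15  # 6 degC chilled-water supply temperature
T_cw_in_K = 303.15    # 30 degC condenser-water inlet temperature

# Reversed-Carnot COP scaled by g, with no PLF term applied.
cop = g_value * T_chw_sup_K / (T_cw_in_K - T_chw_sup_K)
print(round(cop, 2))  # ~4.07
```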
Fix installation instructions to use bash
Fix | @@ -13,10 +13,10 @@ Simply make sure you have [GDB 7.7 or higher](https://www.gnu.org/s/gdb) compile
```bash
# via the install script
## using curl
-$ sh -c "$(curl -fsSL http://gef.blah.cat/sh)"
+$ bash -c "$(curl -fsSL http://gef.blah.cat/sh)"
## using wget
-$ sh -c "$(wget http://gef.blah.cat/sh -O -)"
+$ bash -c "$(wget http://gef.blah.cat/sh -O -)"
# or manually
$ wget -O ~/.gdbinit-gef.py -q http://gef.blah.cat/py
|
Actually show "redefined" warnings, but only for "cdef extern" blocks, i.e. the exact case described in
Closes | @@ -451,8 +451,9 @@ class Scope(object):
# Likewise ignore inherited classes.
pass
elif visibility == 'extern':
- # Silenced, until we have a safer way to prevent pxd-defined cpdef functions from ending up here.
- warning(pos, "'%s' redeclared " % name, 0)
+ # Silenced outside of "cdef extern" blocks, until we have a safe way to
+ # prevent pxd-defined cpdef functions from ending up here.
+ warning(pos, "'%s' redeclared " % name, 1 if self.in_cinclude else 0)
elif visibility != 'ignore':
error(pos, "'%s' redeclared " % name)
entry = Entry(name, cname, type, pos = pos)
|
fix Python 2 compatibility
Actually cgi.escape is still available in Python 3.7, but is deprecated.
So going forward, it's safer to import escape from html.
I disabled escaping quotes (quote=False) for consistency and because
it's not needed for this purpose. | @@ -7,7 +7,13 @@ import os.path
from collections import defaultdict
from itertools import chain
from time import time
-import html
+
+try:
+ # >= Py3.2
+ from html import escape
+except ImportError:
+ # < Py3.2
+ from cgi import escape
import six
from flask import Flask, make_response, jsonify, render_template, request
@@ -110,7 +116,7 @@ def request_stats():
stats.append({
"method": s.method,
"name": s.name,
- "safe_name": html.escape(s.name),
+ "safe_name": escape(s.name, quote=False),
"num_requests": s.num_requests,
"num_failures": s.num_failures,
"avg_response_time": s.avg_response_time,
|
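For context, a minimal sketch of the compatibility pattern used above: import `escape` from `html` where available, fall back to `cgi` otherwise, and pass `quote=False` so both behave the same (only `&`, `<` and `>` are escaped). The sample string is made up.

```python
try:
    from html import escape  # Python >= 3.2
except ImportError:
    from cgi import escape   # Python 2 / < 3.2

# quote=False leaves quote characters alone; &, < and > are still escaped.
print(escape('<b>"tom & jerry"</b>', quote=False))
# -> &lt;b&gt;"tom &amp; jerry"&lt;/b&gt;
```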
tests: drop shrink_osd from tox.ini
shrink_osd has its own tox config file (tox-shrink_osd.ini) | @@ -322,7 +322,6 @@ setenv=
shrink_mds: MDS_TO_KILL = mds0
shrink_mgr: MGR_TO_KILL = mgr1
shrink_mon: MON_TO_KILL = mon2
- shrink_osd: OSD_TO_KILL = 0
shrink_rbdmirror: RBDMIRROR_TO_KILL = rbd-mirror0
shrink_rgw: RGW_TO_KILL = rgw0.rgw0
@@ -338,7 +337,6 @@ changedir=
cluster: {toxinidir}/tests/functional/all_daemons{env:CONTAINER_DIR:}
shrink_mon: {toxinidir}/tests/functional/shrink_mon{env:CONTAINER_DIR:}
shrink_mgn: {toxinidir}/tests/functional/shrink_mon{env:CONTAINER_DIR:}
- shrink_osd: {toxinidir}/tests/functional/shrink_osd{env:CONTAINER_DIR:}
shrink_mgr: {toxinidir}/tests/functional/shrink_mgr{env:CONTAINER_DIR:}
shrink_mds: {toxinidir}/tests/functional/shrink_mds{env:CONTAINER_DIR:}
shrink_rbdmirror: {toxinidir}/tests/functional/shrink_rbdmirror{env:CONTAINER_DIR:}
@@ -403,7 +401,6 @@ commands=
purge_dashboard: {[purge-dashboard]commands}
switch_to_containers: {[switch-to-containers]commands}
shrink_mon: {[shrink-mon]commands}
- shrink_osd: {[shrink-osd]commands}
shrink_mgr: {[shrink-mgr]commands}
shrink_mds: {[shrink-mds]commands}
shrink_rbdmirror: {[shrink-rbdmirror]commands}
|
bugfix in tell_many_at_point()
The new value of the data at point adopted the value of the mean of the new data samples, instead of the mean over all samples (new and old). Fixed! | @@ -370,7 +370,6 @@ class AverageLearner1D(Learner1D):
)
ys = list(ys) # cast to list *and* make a copy
- y_avg = np.mean(ys)
# If x is a new point:
if x not in self.data:
y = ys.pop(0)
@@ -379,21 +378,23 @@ class AverageLearner1D(Learner1D):
# If x is not a new point or if there were more than 1 sample in ys:
if len(ys) > 0:
- self.data[x] = y_avg
self._data_samples[x].extend(ys)
- n = len(self._data_samples[x])
+ n = len(ys)+self._number_samples[x]
+ # Same as n=len(self._data_samples[x]) but faster
+ self.data[x] = (np.mean(ys)*len(ys) + self.data[x]*self._number_samples[x])/n
+ # Same as self.data[x]=np.mean(self._data_samples[x]) but faster
self._number_samples[x] = n
# `self._update_data(x, y, "new")` included the point
# in _undersampled_points. We remove it if there are
# more than min_samples samples, disregarding neighbor_sampling.
if n > self.min_samples:
self._undersampled_points.discard(x)
- self.error[x] = self._calc_error_in_mean(self._data_samples[x], y_avg, n)
+ self.error[x] = self._calc_error_in_mean(self._data_samples[x], self.data[x], n)
self._update_distances(x)
self._update_rescaled_error_in_mean(x, "resampled")
if self.error[x] <= self.min_error or n >= self.max_samples:
self.rescaled_error.pop(x, None)
- super()._update_scale(x, y_avg)
+ super()._update_scale(x, self.data[x])
self._update_losses_resampling(x, real=True)
if self._scale[1] > self._recompute_losses_factor * self._oldscale[1]:
for interval in reversed(self.losses):
|
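The core of the fix is an incremental (weighted) mean: combining the stored mean over the old samples with the mean of the new batch equals the mean over all samples, without re-reading the full history. A small self-contained check with made-up numbers:

```python
import numpy as np

old_samples = [1.0, 3.0]       # hypothetical previously stored samples
new_samples = [5.0, 7.0, 9.0]  # hypothetical newly received samples

old_mean = np.mean(old_samples)
n_old, n_new = len(old_samples), len(new_samples)

# Weighted update as in the patch: only the stored mean and count are needed.
combined = (np.mean(new_samples) * n_new + old_mean * n_old) / (n_old + n_new)

assert np.isclose(combined, np.mean(old_samples + new_samples))  # both equal 5.0
```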
DOC: Fixed README formatting
[ci-skip] | <div align="center">
<img src="http://www.numpy.org/_static/numpy_logo.png"><br>
</div>
+
-----------------
| **`Travis CI Status`** |
|-------------------|
-|[](https://travis-ci.org/numpy/numpy)|
+[](https://travis-ci.org/numpy/numpy)|
NumPy is the fundamental package needed for scientific computing with Python.
|
Updated robot docs with better links to keyword documentation
Each section of keywords gets its own "Full Documentation" subsection
with links to the appropriate spot in Keywords.html. | @@ -236,6 +236,15 @@ The following variables defined in ``Salesforce.robot`` are all used by the ``Op
<http://robotframework.org/SeleniumLibrary/SeleniumLibrary.html#Set%20Selenium%20Timeout>`_ keyword.
Default: ``30 seconds``
+Full Documentation
+------------------
+
+To see the full list of keywords and their descriptions for this
+library, see the section titled
+`Salesforce.robot <Keywords.html#file-cumulusci/robotframework/Salesforce.robot>`_
+in the keyword documentation.
+
+
.. _cumulusci-library-overview:
CumulusCI Library
@@ -306,7 +315,10 @@ job:
Full Documentation
------------------
-All of the keyword documentation can be found in `Keywords.html <Keywords.html>`_.
+To see the full list of keywords and their descriptions for this
+library, see the section titled
+`CumulusCI <Keywords.html#file-cumulusci.robotframework.CumulusCI>`_
+in the keyword documentation.
.. _salesforce-library-overview:
@@ -361,6 +373,15 @@ In addition to the predefined locator strategies, the Salesforce library defines
* - ``title``
- This uses the Salesforce library keyword ``Locate Element by Title`` to find web elements that have a title attribute with the given string. For example, ``title:Appointment`` is shorthand for the xpath ``xpath://*[@title='Appointment']``
+Full Documentation
+------------------
+
+To see the full list of keywords and their descriptions for this
+library, see the section titled
+`Salesforce <Keywords.html#file-cumulusci.robotframework.Salesforce>`_
+in the keyword documentation.
+
+
PageObjects Library
===================
@@ -649,13 +670,16 @@ class which implements keywords which can be used with your custom
objects.
-Keyword Documentation
-=====================
+Full Documentation
+------------------
+
+To see the full list of keywords and their descriptions for page
+objects, see the following sections in the keyword documentation:
+
-Use the following links to download generated documentation for both
-the CumulusCI and Salesforce keywords
+* `PageObjects <Keywords.html#file-cumulusci.robotframework.PageObjects>`_
+* `Base Page Objects <Keywords.html#file-cumulusci/robotframework/pageobjects/BasePageObjects.py>`_
-* :download:`CumulusCI and Salesforce Keyword Documentation <../docs/robot/Keywords.html>`
CumulusCI Robot Tasks
=====================
|
Log BotInfo every time it is written to the datastore
Review-Url: | @@ -186,6 +186,7 @@ class BotInfo(_BotCommon):
super(BotInfo, self)._pre_put_hook()
if not self.task_id:
self.task_name = None
+ logging.info('Pre-put BotInfo: %s', self)
class BotEvent(_BotCommon):
|
Resolve after checking reachability
github issue: AdaCore/libadalang#45 | @@ -33,13 +33,6 @@ package body Langkit_Support.Lexical_Env is
with Inline;
-- Shed env rebindings that are not in the parent chain for From_Env
- function Decorate
- (El : Internal_Map_Element;
- MD : Element_Metadata;
- Rebindings : Env_Rebindings) return Entity;
- -- From an array of entities, decorate every element with additional
- -- Metadata stored in MD.
-
procedure Check_Rebindings_Unicity (Self : Env_Rebindings);
-- Perform a unicity check on the various rebindings in Self. In
-- particular, check that there are no two identical Old_Env and no two
@@ -404,7 +397,10 @@ package body Langkit_Support.Lexical_Env is
-- Return whether, according to Filter, Self should be discarded during
-- the lexical env lookup.
- function Append_Result (E : Entity) return Boolean;
+ function Append_Result
+ (El : Internal_Map_Element;
+ MD : Element_Metadata;
+ Rebindings : Env_Rebindings) return Boolean;
-- Add E to results, if it passes the Can_Reach filter. Return whether
-- result was appended or not.
@@ -425,15 +421,32 @@ package body Langkit_Support.Lexical_Env is
-- Append_Result --
-------------------
- function Append_Result (E : Entity) return Boolean is
+ function Append_Result
+ (El : Internal_Map_Element;
+ MD : Element_Metadata;
+ Rebindings : Env_Rebindings) return Boolean
+ is
+ E : constant Entity :=
+ (El => El.Element,
+ Info => (MD => Combine (El.MD, MD),
+ Rebindings => Rebindings));
begin
+
if Has_Trace then
- Traces.Trace (Me, "Found " & Image (Element_Image (E.El, False)));
+ Traces.Trace
+ (Me, "Found " & Image (Element_Image (E.El, False)));
end if;
if From = No_Element or else Can_Reach_F (E) then
- Results.Append (E);
+ declare
+ Resolved_Entity : constant Entity :=
+ (if El.Resolver = null
+ then E
+ else El.Resolver.all (E));
+ begin
+ Results.Append (Resolved_Entity);
return True;
+ end;
end if;
return False;
@@ -525,8 +538,9 @@ package body Langkit_Support.Lexical_Env is
-- TODO??? Use "for .. of next" GPL release
for I in reverse Elements.First_Index .. Elements.Last_Index loop
if Append_Result
- (Decorate (Elements.Get (I),
- Current_Metadata, Current_Rebindings))
+ (Elements.Get (I),
+ Current_Metadata,
+ Current_Rebindings)
then
if Stop_At_First then
goto Early_Exit;
@@ -920,26 +934,6 @@ package body Langkit_Support.Lexical_Env is
Rebindings => Shed_Rebindings (Env, E_Info.Rebindings));
end Shed_Rebindings;
- --------------
- -- Decorate --
- --------------
-
- function Decorate
- (El : Internal_Map_Element;
- MD : Element_Metadata;
- Rebindings : Env_Rebindings) return Entity
- is
- Result : constant Entity :=
- (El => El.Element,
- Info => (MD => Combine (El.MD, MD),
- Rebindings => Rebindings));
- begin
- return
- (if El.Resolver = null
- then Result
- else El.Resolver.all (Result));
- end Decorate;
-
------------------------------
-- Check_Rebindings_Unicity --
------------------------------
|
Updated index.rst
Added 'support' to index | @@ -59,7 +59,6 @@ You can see an independent benchmark comparing Rasa NLU to closed source alterna
evaluation
fallback
faq
- support
.. toctree::
:maxdepth: 1
@@ -83,3 +82,4 @@ You can see an independent benchmark comparing Rasa NLU to closed source alterna
migrations
license
changelog
+ support
|
Moved error messages above download button.
Alerts are usually displayed above the related content, not below. | {% load i18n %}
+
+{% if multimedia_state.has_form_errors %}
+ <div class="alert alert-danger"><i class="fa fa-exclamation-triangle"></i> {% blocktrans %}<strong>Warning:</strong>
+ This application contains forms with errors—we cannot pull any multimedia references from those forms.
+ {% endblocktrans %}</div>
+{% endif %}
+
+{% if multimedia_state.has_missing_refs %}
+ <div class="alert alert-warning">
+ <p>
+ {% blocktrans %}
+ <i class="fa fa-exclamation-triangle"></i>
+ This application is missing references, so this zip will be incomplete.
+ {% endblocktrans %}
+ </p>
+ {% if not is_multimedia_reference_checker %}
+ <p>
+ {% url "hqmedia_references" domain app.get_id as checker_url %}
+ {% blocktrans %}
+ Visit the <a href='{{ checker_url }}' target= '_blank'>
+ Multimedia Reference Checker
+ <i class='fa fa-external-link'></i>
+ </a>
+ to upload any missing multimedia.
+ {% endblocktrans %}
+ </p>
+ {% endif %}
+ </div>
+{% endif %}
+
{% if multimedia_state.has_media %}
{% if not include_modal and build_profile_access and not app.is_remote_app and app.build_profiles %}
<div class="form-inline">
{% else %}
<div class="alert alert-info">{% blocktrans %}This application currently does not contain any multimedia references.{% endblocktrans %}</div>
{% endif %}
-{% if multimedia_state.has_form_errors %}
- <div class="alert alert-danger"><i class="fa fa-exclamation-triangle"></i> {% blocktrans %}<strong>Warning:</strong>
- This application contains forms with errors—we cannot pull any multimedia references from those forms.
- {% endblocktrans %}</div>
-{% endif %}
-{% if multimedia_state.has_missing_refs %}
- <div class="alert alert-warning">
- <p>
- {% blocktrans %}
- <i class="fa fa-exclamation-triangle"></i>
- This application is missing references, so this zip will be incomplete.
- {% endblocktrans %}
- </p>
- {% if not is_multimedia_reference_checker %}
- <p>
- {% url "hqmedia_references" domain app.get_id as checker_url %}
- {% blocktrans %}
- Visit the <a href='{{ checker_url }}' target= '_blank'>
- Multimedia Reference Checker
- <i class='fa fa-external-link'></i>
- </a>
- to upload any missing multimedia.
- {% endblocktrans %}
- </p>
- {% endif %}
- </div>
-{% endif %}
{% if include_modal %}
{% url "download_multimedia_zip" domain app.get_id as multimedia_url %}
|
Fix breakage from
Summary: (https://github.com/pytorch/fairseq/commit/995c204337d16a6146a433cee360e5a5bfbc9a6f)?src_version_fbid=1030479880843010&dst_version_fbid=247617347518523&transaction_fbid=1601081576900014 | @@ -18,8 +18,6 @@ from typing import Any, Dict, Optional, Union
import numpy as np
import torch
-from omegaconf import DictConfig, OmegaConf, open_dict
-
from fairseq.data import data_utils
from fairseq.dataclass.configs import CheckpointConfig
from fairseq.dataclass.utils import (
@@ -29,6 +27,7 @@ from fairseq.dataclass.utils import (
from fairseq.distributed.fully_sharded_data_parallel import FSDP, has_FSDP
from fairseq.file_io import PathManager
from fairseq.models import FairseqDecoder, FairseqEncoder
+from omegaconf import DictConfig, OmegaConf, open_dict
logger = logging.getLogger(__name__)
@@ -216,7 +215,7 @@ def load_checkpoint(cfg: CheckpointConfig, trainer, **passthrough_args):
cfg.save_dir, "checkpoint_last{}.pt".format(suffix)
)
first_launch = not PathManager.exists(checkpoint_path)
- if first_launch and cfg.get("continue_once", None) is not None:
+ if first_launch and getattr(cfg, "continue_once", None) is not None:
checkpoint_path = cfg.continue_once
elif cfg.finetune_from_model is not None and first_launch:
# if there is no last checkpoint to restore, start the finetune from pretrained model
|
fix mkdocs issue
1. | @@ -22,10 +22,12 @@ from ruamel import yaml
PROJECT_BASE = os.getenv("FATE_PROJECT_BASE") or os.getenv("FATE_DEPLOY_BASE")
FATE_BASE = os.getenv("FATE_BASE")
+READTHEDOC = os.getenv("READTHEDOC")
def get_project_base_directory(*args):
global PROJECT_BASE
+ global READTHEDOC
if PROJECT_BASE is None:
PROJECT_BASE = os.path.abspath(
os.path.join(
@@ -33,7 +35,13 @@ def get_project_base_directory(*args):
os.pardir,
os.pardir,
os.pardir,
- os.pardir
+ )
+ )
+ if READTHEDOC is None:
+ PROJECT_BASE = os.path.abspath(
+ os.path.join(
+ PROJECT_BASE,
+ os.pardir,
)
)
if args:
|
Update upgrading.md
include the local_requirements.txt file to keep ldap from breaking during upgrades. | @@ -30,6 +30,12 @@ Copy the 'configuration.py' you created when first installing to the new version
# cp netbox-X.Y.Z/netbox/netbox/configuration.py netbox/netbox/netbox/configuration.py
```
+Copy your local requirements file if used:
+
+```no-highlight
+# cp netbox-X.Y.Z/local_requirements.txt netbox/local_requirements.txt
+```
+
Also copy the LDAP configuration if using LDAP:
```no-highlight
|
Upgrade Requests from 2.18.4 to 2.20.0 - CVE-2018-18074
Upgrade psutil from 5.6.3 to 5.6.6 - CVE-2019-18874 | @@ -23,7 +23,7 @@ itsdangerous==1.1.0
Jinja2==2.10.1
Mako==1.0.13
passlib==1.7.1
-psutil==5.6.3
+psutil==5.6.6
PyMySQL==0.9.3
python-dateutil==2.8.0
python-editor==1.0.4
@@ -35,7 +35,7 @@ speaklater==1.3
SQLAlchemy==1.3.5
Werkzeug==0.16.0
WTForms==2.2.1
-requests==2.18.4
+requests==2.20.0
flask-markdown==0.3
xmltodict==0.12.0
apscheduler==3.6.1
|
Store orientations before generating crystal; allow passing orientations from...
previously generated crystals of the same stoichiometry and sg | @@ -45,7 +45,7 @@ class molecular_crystal():
a volume factor, generates a molecular crystal consistent with the given
constraints. This crystal is stored as a pymatgen struct via self.struct
'''
- def __init__(self, sg, molecules, numMols, factor):
+ def __init__(self, sg, molecules, numMols, factor, allow_inversion=False, orientations=None):
#Necessary input
numMols = np.array(numMols) #must convert it to np.array
@@ -65,6 +65,14 @@ class molecular_crystal():
self.numMols = numMols * cellsize(self.sg)
self.volume = estimate_volume_molecular(self.numMols, self.boxes, self.factor)
self.wyckoffs = get_wyckoffs(self.sg, organized=True) #2D Array of Wyckoff positions organized by multiplicity
+ #Whether or not to allow chiral molecules to be flipped
+ self.allow_inversion = allow_inversion
+ #When generating multiple crystals of the same stoichiometry and sg,
+ #allow the user to re-use the allowed orientations, to reduce time cost
+ if orientations is None:
+ self.get_orientations()
+ else:
+ self.valid_orientations = orientations
self.generate_crystal()
@@ -76,12 +84,38 @@ class molecular_crystal():
self.Msg5 = 'Finishing: added the specie'
self.Msg6 = 'Finishing: added the whole structure'
- def check_compatible(self):
+ def get_orientations(self):
+ """
+ Calculate the valid orientations for each Molecule and Wyckoff position.
+ Returns a list with 4 indices:
+ index 1: the molecular prototype's index
+ index 2: the Wyckoff position's 1st index (based on multiplicity)
+ index 3: the WP's 2nd index (within the group of equal multiplicity)
+ index 4: the index of the valid orientation for the molecule/WP pair
+ For example, self.valid_orientations[i][j][k] would be a list of valid
+ orientations for self.molecules[i],
+ in the Wyckoff position self.wyckoffs[j][k]
"""
+ self.valid_orientations = []
+ for mol in self.molecules:
+ self.valid_orientations.append([])
+ wp_index = -1
+ for i, x in enumerate(self.wyckoffs):
+ self.valid_orientations[-1].append([])
+ for j, wp in enumerate(x):
+ wp_index += 1
+ allowed = orientation_in_wyckoff_position(mol, self.sg, wp_index, already_oriented=True, allow_inversion=self.allow_inversion)
+ if allowed is not False:
+ self.valid_orientations[-1][-1].append(allowed)
+ elif allowed is False:
+ self.valid_orientations[-1][-1].append([])
+
+ def check_compatible(self):
+ '''
check if the number of molecules is compatible with the
wyckoff positions
needs to improve later
- """
+ '''
N_site = [len(x[0]) for x in self.wyckoffs]
has_freedom = False
#remove WP's with no freedom once they are filled
|
make control flow abstract eval to shaped level
fixes | @@ -211,7 +211,7 @@ def while_loop(cond_fun, body_fun, init_val):
return tree_unflatten(body_tree, outs)
def _while_loop_abstract_eval(*args, **kwargs):
- return kwargs["body_jaxpr"].out_avals
+ return _map(raise_to_shaped, kwargs["body_jaxpr"].out_avals)
def _while_loop_translation_rule(c, axis_env, *args, **kwargs):
backend = kwargs.pop('backend')
@@ -363,7 +363,7 @@ def cond(pred, true_operand, true_fun, false_operand, false_fun):
return tree_unflatten(true_out_tree, out)
def _cond_abstract_eval(*args, **kwargs):
- return kwargs["true_jaxpr"].out_avals
+ return _map(raise_to_shaped, kwargs["true_jaxpr"].out_avals)
def _cond_translation_rule(c, axis_env, pred, *args, **kwargs):
backend = kwargs.pop("backend", None)
|
DOC: Adding examples to scipy.stats.mstat
Added examples for tmax and tmean. | @@ -1698,6 +1698,22 @@ def tmax(a, upperlimit=None, axis=0, inclusive=True):
-----
For more details on `tmax`, see `stats.tmax`.
+ Examples
+ --------
+ >>> import numpy as np
+ >>> from scipy.stats import mstats
+ >>> a = np.array([[6, 8, 3, 0],
+ ... [3, 9, 1, 2],
+ ... [8, 7, 8, 2],
+ ... [5, 6, 0, 2],
+ ... [4, 5, 5, 2]])
+ ...
+ ...
+ >>> mstats.tmax(a,4)
+ masked_array(data=[4, --, 3, 2],
+ mask=[False, True, False, False],
+ fill_value=999999)
+
"""
a, axis = _chk_asarray(a, axis)
am = trima(a, (None, upperlimit), (False, inclusive))
|
Alarms: Add device serial number field
Added insertion, via adapter_alarms.py, of onu_serial_number field to context for all onu related alarms | import structlog
import arrow
from voltha.protos.events_pb2 import AlarmEventType, AlarmEventSeverity,\
- AlarmEventState
+ AlarmEventState, AlarmEventCategory
+log = structlog.get_logger()
+
# TODO: In the device adapter, the following alarms are still TBD
# (Taken from openolt_alarms)
@@ -86,11 +88,20 @@ class AdapterAlarms:
"""
try:
current_context = {}
-
if isinstance(context_data, dict):
for key, value in context_data.iteritems():
current_context[key] = str(value)
+ ser_num = None
+ device = self.adapter_agent.get_device(device_id=self.device_id)
+ ser_num = device.serial_number
+
+ """
+ Only put in the onu serial numbers since the OLT does not currently have a serial number and the
+ value is the ip:port address.
+ """
+ if isinstance(context_data, dict) and '_onu' in device.type.lower():
+ current_context["onu_serial_number"] = ser_num
alarm_event = self.adapter_agent.create_alarm(
id=alarm_data.get('id', 'voltha.{}.{}.olt'.format(self.adapter_name,
self.device_id)),
|
Disable ST3 hovers once it is open in an LSP client.
Should fix | @@ -598,6 +598,7 @@ def notify_did_open(view: sublime.View):
config = config_for_scope(view)
client = client_for_view(view)
if client and config:
+ view.settings().set("show_definitions", False)
if view.file_name() not in document_states:
get_document_state(view.file_name())
if show_view_status:
|
Add missing 'node_modules' segment in path (--no-bin-links)
Thanks for the catch | @@ -165,8 +165,8 @@ def resolve_prettier_cli_path(view, plugin_path, st_project_path):
#
# check locally installed prettier using the '--no-bin-links' opion...
# and when symlinks aren't avail. see issue #146.
- project_prettier_path_nbl = os.path.join(st_project_path, 'prettier', 'prettier-bin.js')
- plugin_prettier_path_nbl = os.path.join(plugin_path, 'prettier', 'prettier-bin.js')
+ project_prettier_path_nbl = os.path.join(st_project_path, 'node_modules', 'prettier', 'prettier-bin.js')
+ plugin_prettier_path_nbl = os.path.join(plugin_path, 'node_modules', 'prettier', 'prettier-bin.js')
if os.path.exists(project_prettier_path_nbl):
return project_prettier_path_nbl
if os.path.exists(plugin_prettier_path_nbl):
|
Work around single stepping bug in GDB
Synchronous stepping runs in the background due to a bug introduced in
GDB. This patch removes stepping from this test until GDB has been
fixed. | @@ -245,10 +245,16 @@ def run_test():
print("Error - could not set pc to function")
breakpoint.delete()
- # Test the speed of the different step types
- test_result["step_time_si"] = test_step_type("si")
- test_result["step_time_s"] = test_step_type("s")
- test_result["step_time_n"] = test_step_type("n")
+## Stepping removed as a workaround for a GDB bug. Launchpad issue tracking this is here:
+## https://bugs.launchpad.net/gcc-arm-embedded/+bug/1700595
+#
+# # Test the speed of the different step types
+# test_result["step_time_si"] = test_step_type("si")
+# test_result["step_time_s"] = test_step_type("s")
+# test_result["step_time_n"] = test_step_type("n")
+ test_result["step_time_si"] = -1
+ test_result["step_time_s"] = -1
+ test_result["step_time_n"] = -1
# TODO,c1728p9 - test speed getting stack trace
# TODO,c1728p9 - test speed with cache turned on
# TODO,c1728p9 - check speed vs breakpoints
|
langkit.utils.types.TypeSet: add an "update" method
TN: | @@ -120,6 +120,15 @@ class TypeSet(object):
"""
return t in self.matched_types
+ def update(self, type_set):
+ """
+ Extend self to contain all types in ``type_set``.
+
+ :param TypeSet type_set: Types to include.
+ """
+ assert isinstance(type_set, TypeSet)
+ self.matched_types.update(type_set.matched_types)
+
def include(self, t):
"""
Include a class and all of its subclasses.
|
improved exception handling
catch unexpected exceptions in sync threads
don't crash thread on missing Dropbox folder | @@ -6,7 +6,6 @@ Created on Wed Oct 31 16:23:13 2018
@author: samschott
"""
# system imports
-import sys
import os
import os.path as osp
import platform
@@ -443,22 +442,15 @@ class UpDownSync(object):
# Helper functions
# ====================================================================================
- def ensure_dropbox_folder_present(self, raise_exception=False):
+ def ensure_dropbox_folder_present(self):
"""
Checks if the Dropbox folder still exists where we expect it to be.
- :param bool raise_exception: If ``True``, raises an exception when the folder
- cannot be found. If ``False``, this function only create an entry in the log.
- Defaults to ``False``.
:raises: DropboxDeletedError
"""
if not osp.isdir(self.dropbox_path):
- exc = DropboxDeletedError("Dropbox folder has been moved or deleted.")
- logger.error("Dropbox folder has been moved or deleted.",
- exc_info=(type(exc), exc, None))
- if raise_exception:
- raise exc
+ raise DropboxDeletedError("Dropbox folder has been moved or deleted.")
def to_dbx_path(self, local_path):
"""
@@ -765,6 +757,7 @@ class UpDownSync(object):
:return: (list of file events, time_stamp)
:rtype: (list, float)
"""
+ self.ensure_dropbox_folder_present()
try:
events = [self.queue_to_upload.get(timeout=timeout)]
except queue.Empty:
@@ -1651,6 +1644,8 @@ def download_worker(sync, syncing, running, queue_downloading):
logger.exception("Sync error", exc_info=True)
syncing.clear() # stop syncing
running.clear() # shutdown threads
+ except Exception:
+ logger.exception("Unexpected error")
finally:
# clear queue_downloading
queue_downloading.queue.clear()
@@ -1677,6 +1672,7 @@ def upload_worker(sync, syncing, running, queue_uploading):
while running.is_set():
+ try:
# wait until resumed, check collect changes while inactive
if not syncing.is_set():
syncing.wait()
@@ -1689,26 +1685,28 @@ def upload_worker(sync, syncing, running, queue_uploading):
for e in events:
queue_uploading.put(e.src_path)
- # check if local directory still exists
- sync.ensure_dropbox_folder_present(raise_exception=True)
-
if len(events) > 0:
# apply changes
with sync.lock:
- try:
logger.info(SYNCING)
sync.apply_local_changes(events, local_cursor)
logger.info(IDLE)
+ else:
+ # just update local cursor
+ if syncing.is_set():
+ sync.last_sync = local_cursor
except CONNECTION_ERRORS as e:
logger.info(DISCONNECTED)
logger.debug(e)
disconnected_signal.send()
syncing.clear() # must be started again from outside
- else:
- # just update local cursor
- if syncing.is_set():
- sync.last_sync = local_cursor
-
+ except DropboxDeletedError:
+ logger.exception("Dropbox folder has been moved or deleted.")
+ syncing.clear() # stop syncing
+ running.clear() # shutdown threads
+ except Exception:
+ logger.exception("Unexpected error")
+ finally:
queue_uploading.queue.clear()
|
Update celery-deployment.yaml
comment out probes | @@ -46,16 +46,16 @@ spec:
readOnly: true
mountPath: /opt/reopt/keys.py
subPath: {{ .Values.appEnv }}-keys.py
- readinessProbe:
- exec:
- command: ["pgrep", "-f", "bin/celery"]
- periodSeconds: 5
- timeoutSeconds: 3
- failureThreshold: 3
- livenessProbe:
- exec:
- command: ["pgrep", "-f", "bin/celery"]
- initialDelaySeconds: 30
- periodSeconds: 60
- timeoutSeconds: 30
- failureThreshold: 10
+# readinessProbe:
+# exec:
+# command: ["pgrep", "-f", "bin/celery"]
+# periodSeconds: 5
+# timeoutSeconds: 3
+# failureThreshold: 3
+# livenessProbe:
+# exec:
+# command: ["pgrep", "-f", "bin/celery"]
+# initialDelaySeconds: 30
+# periodSeconds: 60
+# timeoutSeconds: 30
+# failureThreshold: 10
|
[microTVM][Zephyr] Add recommended heap size for NRF and qemu_x86
This PR sets recommended heap size for qemu_x86 and NRF board to fix memory size with models like VWW using AoT host driven executor. | "is_qemu": false,
"fpu": true,
"vid_hex": "1366",
- "pid_hex": "1055"
+ "pid_hex": "1055",
+ "recommended_heap_size_bytes": 368640
},
"nucleo_f746zg": {
"board": "nucleo_f746zg",
"fpu": true,
"vid_hex": "0483",
"pid_hex": "374b",
- "recommended_heap_size_bytes": 512000
+ "recommended_heap_size_bytes": 524288
},
"qemu_cortex_r5": {
"board": "qemu_cortex_r5",
"is_qemu": true,
"fpu": true,
"vid_hex": "",
- "pid_hex": ""
+ "pid_hex": "",
+ "recommended_heap_size_bytes": 524288
},
"stm32f746g_disco": {
"board": "stm32f746g_disco",
|
Add the `add_header` job to the CI/CD
The `add_header` job checks the file headers for errors. This is helpful when a
new file is introduced, since the licence information can easily be forgotten. | @@ -7,6 +7,18 @@ on:
- cron: '0 3 * * *'
jobs:
+ add_header:
+ name: Add header lint
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ # We need the history to determine the file creation date.
+ fetch-depth: 0
+ - name: Check headers
+ shell: bash
+ run: |
+ python3 tools/add_header --dry-run --disable-progress-bar
setup:
runs-on: ubuntu-latest
steps:
|
The BaseStream class now implements __anext__()
When reading chunks, the write() method of AsyncIterablePayload in
aiohttp3 calls: chunk = await self._iter.__anext__(). WB's BaseStream
class must implement __anext__(), where the read() method is called. | @@ -17,6 +17,20 @@ class BaseStream(asyncio.StreamReader, metaclass=abc.ABCMeta):
self.readers = {}
self.writers = {}
+ def __aiter__(self):
+ return self
+
+ # TODO: Add more note on `AsyncIterablePayload` and its `write()` method in aiohttp3
+ # TODO: Improve the BaseStream with `aiohttp.streams.AsyncStreamReaderMixin`
+ async def __anext__(self):
+ try:
+ chunk = await self.read()
+ except EOFError:
+ raise StopAsyncIteration
+ if chunk == b'':
+ raise StopAsyncIteration
+ return chunk
+
@abc.abstractproperty
def size(self):
pass
|
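To illustrate the protocol the patch implements (a hypothetical stand-in, not WB's actual BaseStream): `__aiter__` returns self and `__anext__` returns chunks until it raises StopAsyncIteration, which is what lets aiohttp-style consumers call `__anext__()` directly or use `async for`.

```python
import asyncio

class ChunkStream:
    """Hypothetical stand-in showing the async-iterator protocol."""

    def __init__(self, chunks):
        self._chunks = list(chunks)

    def __aiter__(self):
        return self

    async def __anext__(self):
        if not self._chunks:           # exhausted -> stop iteration
            raise StopAsyncIteration
        return self._chunks.pop(0)

async def main():
    async for chunk in ChunkStream([b'abc', b'def']):
        print(chunk)

asyncio.run(main())
```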
integrations: Remove link underline from back arrow icon.
Fixes | <div id="integration-instructions-group">
<div id="integration-instruction-block" class="integration-instruction-block">
- <a href="/integrations" id="integration-list-link"><i class="icon-vector-circle-arrow-left"></i><span>Back to list</span></a>
+ <a href="/integrations" id="integration-list-link" class="no-underline"><i class="icon-vector-circle-arrow-left"></i><span>Back to list</span></a>
<h3 class="name"></h3>
<div class="categories"></div>
</div>
|
Update api.rst
Corrected typo in "tojson" example, `const names = {{ names|tojson }};` was `const names = {{ names|tosjon }};` | @@ -248,7 +248,7 @@ HTML ``<script>`` tags.
.. sourcecode:: html+jinja
<script>
- const names = {{ names|tosjon }};
+ const names = {{ names|tojson }};
renderChart(names, {{ axis_data|tojson }});
</script>
|
[ci] Fix mac pipeline (use python 2 in CI scripts)
determine_tests_to_run.py uses python2 on mac, so we need to keep compatibility there | @@ -234,7 +234,12 @@ if __name__ == "__main__":
RAY_CI_DASHBOARD_AFFECTED = 1
RAY_CI_DOC_AFFECTED = 1
else:
- print(f"Unhandled source code change: {changed_file}", file=sys.stderr)
+ print(
+ "Unhandled source code change: {changed_file}".format(
+ changed_file=changed_file
+ ),
+ file=sys.stderr,
+ )
RAY_CI_ML_AFFECTED = 1
RAY_CI_TUNE_AFFECTED = 1
|
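The reason for the change: f-strings need Python 3.6+, while `str.format()` works on both Python 2 and 3. A minimal sketch with a made-up file name (printing to stderr on Python 2 also needs the print_function future import):

```python
from __future__ import print_function
import sys

changed_file = "python/ray/example.py"  # hypothetical value
print(
    "Unhandled source code change: {changed_file}".format(changed_file=changed_file),
    file=sys.stderr,
)
```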
Improve get-ami-list.py script to support oldest tags
In pre-release public versions of cfncluster, the amis.txt file doesn't contain
the distro comment at the top, because only centos6 was supported.
Now the script also works with tags like v0.0.7, v1.0.0-beta, etc. | @@ -47,7 +47,14 @@ def build_release_ami_list(scratch_dir, tag):
active_distro = m.groups()[0]
amis[active_distro] = []
else:
- m = re.match('.*:\s*(ami-[a-zA-Z0-9]*)', line)
+ m = re.match('.*:?\s*(ami-[a-zA-Z0-9]*)', line)
+ if active_distro != None:
+ amis[active_distro].append(m.groups()[0])
+ else:
+ # In old tags, the amis.txt file doesn't contain the distro comment on top
+ # because centos6 only was supported
+ active_distro = "centos6"
+ amis[active_distro] = []
amis[active_distro].append(m.groups()[0])
return amis
|
Update conf.json
Removed QRadar playbook from skipped | "MaxMind GeoIP2": "Issue 18932.",
"Exabeam": "Issue 19371",
"McAfee ESM-v10": "Issue 20225",
- "QRadar Indicator Hunting Test": "Issue 21150",
"_comment": "~~~ UNSTABLE ~~~",
"ServiceNow": "Instance goes to hibernate every few hours",
|
[query] Fix flushing on HadoopFS.toPositionedOutputStream
fixes | @@ -32,7 +32,7 @@ object HadoopFS {
override def write(bytes: Array[Byte], off: Int, len: Int): Unit = os.write(bytes, off, len)
- override def flush(): Unit = os.flush()
+ override def flush(): Unit = if (!closed) os.flush()
override def close(): Unit = {
if (!closed) {
|
Update quickstart.rst
A missing letter | @@ -30,7 +30,7 @@ Compare Graphene's *code-first* approach to building a GraphQL API with *schema-
.. _Ariadne: https://ariadne.readthedocs.io
-Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas tha are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well.
+Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well.
An example in Graphene
----------------------
|
OverlandFlow component TypeError fix
Fixing TypeError that pops up when generate_overland_flow_deAlmeida.py
is run. | @@ -690,7 +690,7 @@ class OverlandFlow(Component):
discharge_vals = discharge_vals.reshape(self.grid.number_of_nodes, 4)
- discharge_vals = discharge_vals.sum(axis=1.0)
+ discharge_vals = discharge_vals.sum(axis=1)
return discharge_vals
|
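For reference, recent NumPy versions reject a float `axis` argument, which is what triggered the TypeError above; an integer axis works. A quick sketch with made-up values:

```python
import numpy as np

vals = np.arange(12).reshape(3, 4)
print(vals.sum(axis=1))  # OK: [ 6 22 38]
# vals.sum(axis=1.0)     # TypeError on recent NumPy: axis must be an integer
```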
MessageWidget : Fix Python 3.8 DeprecationWarning
```
/Users/john/dev/build/gaffer/python/GafferUI/MessageWidget.py:1350: DeprecationWarning: an integer is required (got type Enum). Implicit conversion to integers using __int__ is deprecated, and may be removed in a future version of Python.
``` | @@ -1347,7 +1347,7 @@ class _MessageTableFilterModel( QtCore.QSortFilterProxyModel ) :
def filterAcceptsRow( self, sourceRow, sourceParent ) :
- levelIndex = self.sourceModel().index( sourceRow, _MessageTableModel.Column.Level, sourceParent )
+ levelIndex = self.sourceModel().index( sourceRow, int( _MessageTableModel.Column.Level ), sourceParent )
return self.sourceModel().data( levelIndex, _MessageTableModel.ValueRole ) <= self.__maxLevel
# The base TabelModel representing the underlying message data.
|
Fix reading from length-delimited input
`protobuff::parse_from_reader` no longer handles
length-delimited protobuf streams correctly. The proper way to read
these streams is via `CodedInputStream::read_message`. | @@ -457,8 +457,32 @@ fn restore_block(source: &mut protobuf::CodedInputStream) -> Result<Option<Block
return Ok(None);
}
- let block = protobuf::parse_from_reader(source)
- .map_err(|err| CliError::EnvironmentError(format!("Failed to parse block: {}", err)))?;
+ source
+ .read_message()
+ .map(Some)
+ .map_err(|err| CliError::EnvironmentError(format!("Failed to parse block: {}", err)))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use protobuf::CodedInputStream;
+
+ #[test]
+ fn backup_and_restore() {
+ let mut buffer: Vec<u8> = vec![];
+
+ let mut block = Block::new();
+ block.set_header_signature("abc123".into());
- Ok(Some(block))
+ backup_block(&block, &mut buffer).unwrap();
+ let mut is = CodedInputStream::from_bytes(&buffer);
+
+ let restored_block = restore_block(&mut is).unwrap();
+
+ assert_eq!(Some(block), restored_block);
+
+ assert_eq!(None, restore_block(&mut is).unwrap());
+ }
}
|
Issue with Multimaterial on Hybrid render.
PURPOSE
This is a regression: after it, Multimaterial doesn't assign any material to the shape in Hybrid.
EFFECT OF CHANGE
Fixed assigning material first to shape in hybrid if multimaterial is used. | @@ -184,6 +184,10 @@ class Shape(pyrpr.Shape):
super().set_material(material)
+ def set_material_faces(self, material, face_indices: np.array):
+ if not self.materials:
+ self.set_material(material)
+
@class_ignore_unsupported
class Mesh(pyrpr.Mesh, Shape):
|
Update release instructions to include cheatsheet
* Update release instructions to include cheatsheet
Update release instructions to include cheatsheet
* minor update
* edit release cheatsheet pdf link example | * Click 'Publish release' and the release will go live.
* Wait ~10 minutes and then locally test that the PyPi package is available and working with the latest release version, ask team members to also independently verify.
+### Release Cheatsheet
+
+* If intending to create a new cheatsheet for the release, refer to [autogluon-doc-utils README.md](https://github.com/Innixma/autogluon-doc-utils) for instructions on creating a new cheatsheet.
+* If a cheatsheet exists for `0.x.y` (or `0.x`), update the `docs/cheatsheet.rst` url paths ([example](https://github.com/awslabs/autogluon/blob/0.4.1/docs/cheatsheet.rst)) in branch `0.x.y` to the correct location ([example for v0.4.0 and v0.4.1](https://github.com/Innixma/autogluon-doc-utils/tree/main/docs/cheatsheets/v0.4.0)).
+ * Example urls: [JPEG](https://raw.githubusercontent.com/Innixma/autogluon-doc-utils/main/docs/cheatsheets/v0.4.0/autogluon-cheat-sheet.jpeg), [PDF](https://nbviewer.org/github/Innixma/autogluon-doc-utils/blob/main/docs/cheatsheets/v0.4.0/autogluon-cheat-sheet.pdf)
+ * Do NOT do this for `stable` branch or `master` branch, instead have them continue pointing to the [stable cheatsheet files](https://github.com/Innixma/autogluon-doc-utils/tree/main/docs/cheatsheets/stable). This is to ensure that as we release new versions of the cheatsheet, old docs will still refer to the correct cheatsheet for their version.
+ * Finally, update the stable files [here](https://github.com/Innixma/autogluon-doc-utils/tree/main/docs/cheatsheets/stable) to reflect the latest released version of the cheatsheet.
+
### Post Release
* IF THERE IS A MAJOR ISSUE: Do an emergency hot-fix and a new release ASAP. Releases cannot be deleted, so a new release will have to be done.
|
Fix definition of ssl_protocol
The syntax for TLS 1.0 is incorrect for Apache servers
which appear to be the only users of this variable.
Depends-On: | ## SSL
# These do not need to be configured unless you're creating certificates for
# services running behind Apache (currently, Horizon and Keystone).
-ssl_protocol: "ALL -SSLv2 -SSLv3 -TLSv1.0 -TLSv1.1"
+ssl_protocol: "ALL -SSLv2 -SSLv3 -TLSv1 -TLSv1.1"
# Cipher suite string from https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
ssl_cipher_suite: "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:RSA+AESGCM:RSA+AES:!aNULL:!MD5:!DSS"
|
Use the highest bitrate stream regardless of codec
The get_audio_only function defaults to filtering for mp4 streams.
This seems arbitrary as ffmpeg has the capabilities for other codecs
too and higher quality may be achieved this way. | @@ -180,7 +180,13 @@ async def download_song(self, songObj: SongObj) -> None:
else:
youtubeHandler = YouTube(songObj.get_youtube_link())
- trackAudioStream = youtubeHandler.streams.get_audio_only()
+ trackAudioStream = youtubeHandler.streams.filter(only_audio=True).order_by('bitrate').last()
+
+ if not trackAudioStream:
+ print(f"Unable to get audio stream for \"{songObj.get_song_name()}\" "
+ f"by \"{songObj.get_contributing_artists()[0]}\" "
+ f"from video \"{songObj.get_youtube_link()}\"")
+ return None
downloadedFilePath = await self._download_from_youtube(convertedFileName, tempFolder,
trackAudioStream)
|
Update signals doc - clarification on EmbeddedDocument
Since there is a .save() method on EmbeddedDocument, you could be tempted to attach a pre_save event to an EmbeddedDocument. This update is an attempt to make this clearer. | @@ -113,6 +113,10 @@ handlers within your subclass::
signals.pre_save.connect(Author.pre_save, sender=Author)
signals.post_save.connect(Author.post_save, sender=Author)
+.. warning::
+
+ Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently.
+
Finally, you can also use this small decorator to quickly create a number of
signals and attach them to your :class:`~mongoengine.Document` or
:class:`~mongoengine.EmbeddedDocument` subclasses as class decorators::
|
digest: Remove comments from get_hot_topics.
The code is self explanatory. | @@ -187,11 +187,8 @@ def get_hot_topics(
if topic.stream_id() in stream_ids
]
- # Start with the two most diverse topics.
hot_topics = heapq.nlargest(2, topics, key=DigestTopic.diversity)
- # Pad out our list up to MAX_HOT_TOPICS_TO_BE_INCLUDED_IN_DIGEST items,
- # using the topics' length (aka message count) as the secondary filter.
for topic in heapq.nlargest(MAX_HOT_TOPICS_TO_BE_INCLUDED_IN_DIGEST, topics, key=DigestTopic.length):
if topic not in hot_topics:
hot_topics.append(topic)
|
[cleanup] return value of addCommonscat is never used
remove returning True in addCommonscat
unpack add_text to variables | @@ -285,7 +285,7 @@ class CommonscatBot(SingleSiteBot):
pywikibot.output('Commonscat link at {} to Category:{} is ok'
.format(page.title(),
currentCommonscatTarget))
- return True
+ return
if checkedCommonscatTarget:
# We have a new Commonscat link, replace the old one
@@ -293,7 +293,7 @@ class CommonscatBot(SingleSiteBot):
currentCommonscatTarget,
primaryCommonscat,
checkedCommonscatTarget, LinkText, Note)
- return True
+ return
# Commonscat link is wrong
commonscatLink = self.findCommonscatLink(page)
@@ -314,12 +314,10 @@ class CommonscatBot(SingleSiteBot):
else:
textToAdd = '{{%s|%s}}' % (primaryCommonscat,
commonscatLink)
- rv = add_text(page, textToAdd,
+ _, _, always = add_text(page, textToAdd,
self.getOption('summary'),
always=self.getOption('always'))
- self.options['always'] = rv[2]
- return True
- return True
+ self.options['always'] = always
def changeCommonscat(
self, page=None, oldtemplate='', oldcat='',
|
Reset the mark counter on clear all marks
also, add next mark and prev mark key shortcuts | @@ -83,6 +83,8 @@ In "Move" mode the following keys are active:
- "n" will place a new mark at the site of the cursor
- "m" will move the current mark (if any) to the site of the cursor
- "d" will delete the current mark (if any)
+- "j" will select the previous mark (if any)
+- "k" will select the next mark (if any)
**User Configuration**
@@ -372,6 +374,7 @@ class PixTable(GingaPlugin.LocalPlugin):
self.w.marks.append_text('None')
self.w.marks.set_index(0)
self.mark_selected = None
+ self.mark_index = 0
def set_font_size_cb(self, w, index):
self.fontsize = self.fontsizes[index]
@@ -607,6 +610,37 @@ class PixTable(GingaPlugin.LocalPlugin):
self.redo()
return True
+ def prev_mark(self):
+ if len(self.marks) <= 1 or self.mark_selected is None:
+ # no previous
+ return
+
+ idx = self.marks.index(self.mark_selected)
+ idx = idx - 1
+ if idx < 0:
+ return
+ tag = self.marks[idx]
+ if tag == 'None':
+ tag = None
+ self.select_mark(tag)
+
+ def next_mark(self):
+ if len(self.marks) <= 1:
+ # no next
+ return
+
+ if self.mark_selected is None:
+ idx = 0
+ else:
+ idx = self.marks.index(self.mark_selected)
+ idx = idx + 1
+ if idx >= len(self.marks):
+ return
+ tag = self.marks[idx]
+ if tag == 'None':
+ tag = None
+ self.select_mark(tag)
+
def keydown_cb(self, canvas, event, data_x, data_y, viewer):
if event.key == 'n':
caption = self.w.caption.get_text().strip()
@@ -621,6 +655,12 @@ class PixTable(GingaPlugin.LocalPlugin):
elif event.key == 'd':
self.clear_mark_cb()
return True
+ elif event.key == 'j':
+ self.prev_mark()
+ return True
+ elif event.key == 'k':
+ self.next_mark()
+ return True
return False
def draw_cb(self, canvas, tag):
|
Bump data_functional_testing
Fixes | @@ -51,7 +51,7 @@ DICT_URL = {
"url": ["https://github.com/ivadomed/model_find_disc_t2/archive/r20200928.zip"],
"description": "Intervertebral disc detection model trained on T2-weighted images."},
"data_functional_testing": {
- "url": ["https://github.com/ivadomed/data_functional_testing/archive/r20210617.zip"],
+ "url": ["https://github.com/ivadomed/data_functional_testing/archive/r20211002.zip"],
"description": "Data used for functional testing in Ivadomed."},
"data_axondeepseg_sem": {
"url": ["https://github.com/axondeepseg/data_axondeepseg_sem/archive/r20211130.zip"],
|
Remove command to set step-mode on
Fix issue which shows that gdb's skip functionality is broken.
This occurs when setting step-mode to be on. | @@ -9974,7 +9974,6 @@ if __name__ == "__main__":
gdb.execute("set confirm off")
gdb.execute("set verbose off")
gdb.execute("set pagination off")
- gdb.execute("set step-mode on")
gdb.execute("set print elements 0")
# gdb history
|
chore: mirror to GitLab
After every merge, this commit will push the latest version to GitLab. | @@ -157,3 +157,12 @@ jobs:
branch: gh-pages
directory: gh-pages
github_token: ${{ secrets.GITHUB_TOKEN }}
+ - name: Mirror to GitLab
+ run: |
+ git clone https://github.com/Kubeinit/kubeinit.git kubeinit_mirror
+ cd kubeinit_mirror
+ git branch -r | grep -v '\->' | while read remote; do git branch --track "${remote#origin/}" "$remote"; done
+ git fetch --all
+ git pull --all
+ sed -i 's/https:\/\/github\.com\/Kubeinit\/kubeinit\.git/https:\/\/github-access:${{ secrets.GITLAB_TOKEN }}@gitlab\.com\/kubeinit\/kubeinit.git/g' .git/config
+ git push --force --all origin
|
Update service.py
I created some sub menu items for Standard and Feature audits. Very simple, just creates events for entering and exiting these two sub menus so a user can configure slides to show. | @@ -48,7 +48,7 @@ software_update_script: single|str|None
self.machine.events.wait_for_any_event(self.config['mode_settings']['enter_events']): "ENTER",
self.machine.events.wait_for_any_event(self.config['mode_settings']['up_events']): "UP",
self.machine.events.wait_for_any_event(self.config['mode_settings']['down_events']): "DOWN",
- })
+ }, self.machine.clock.loop)
async def _run(self):
while True:
@@ -143,13 +143,13 @@ software_update_script: single|str|None
await self._make_menu(self._load_diagnostic_light_menu_entries())
# Audits
- @staticmethod
- def _load_audit_menu_entries() -> List[ServiceMenuEntry]:
+ # @staticmethod
+ def _load_audit_menu_entries(self) -> List[ServiceMenuEntry]: # () -> List[ServiceMenuEntry]:
"""Return the audit menu items with label and callback."""
return [
# ServiceMenuEntry("Earning Audits", None),
- # ServiceMenuEntry("Standard Audits", None),
- # ServiceMenuEntry("Feature Audits", None),
+ ServiceMenuEntry("Standard Audits", self._audit_standard_menu), # None),
+ ServiceMenuEntry("Feature Audits", self._audit_feature_menu), # None),
]
async def _audits_menu(self):
@@ -472,3 +472,30 @@ software_update_script: single|str|None
value_position = len(values) - 1
self.machine.settings.set_setting_value(items[position].name, values[value_position])
self._update_settings_slide(items, position, is_change=True)
+
+ # ENTER hit when on Standard Audit sub menu, post events for user to show slides
+ async def _audit_standard_menu(self):
+ self.machine.events.post("service_audit_standard_start")
+
+ while True:
+ key = await self._get_key()
+ if key == 'ESC':
+ break
+ if key == 'ENTER':
+ pass
+
+ self.machine.events.post("service_audit_standard_stop")
+
+ # ENTER hit when on Features Audit sub menu, post events for user to show slides
+ async def _audit_feature_menu(self):
+ self.machine.events.post("service_audit_feature_start")
+
+ while True:
+ key = await self._get_key()
+ if key == 'ESC':
+ break
+ if key == 'ENTER':
+ pass
+
+ self.machine.events.post("service_audit_feature_stop")
+
|
Undercloud needs both collectd-disk and collectd-python
It is unclear what has changed in OSP13; however, I am having an issue
getting collectd to start without ensuring these packages are now
installed | - collectd
- collectd-apache
- collectd-ceph
+ - collectd-disk
- collectd-mysql
- - collectd-turbostat
- collectd-ping
+ - collectd-python
+ - collectd-turbostat
# (sai) Since we moved to containers we don't have java installed on the host
# anymore but it is needed for collectd-java
|
Update setup-remote.md
Fixes | @@ -64,9 +64,17 @@ One option is to get an Infura account.
A simpler option is to bypass the need for an account! Just change to RPCs that don't need Infura. The command below replaces Infura RPCs with public ones in `network-config.yaml`:
-`console
+* Linux users:
+```console
sed -i 's#https://polygon-mainnet.infura.io/v3/$WEB3_INFURA_PROJECT_ID#https://polygon-rpc.com/#g; s#https://polygon-mumbai.infura.io/v3/$WEB3_INFURA_PROJECT_ID#https://rpc-mumbai.maticvigil.com#g' ~/.brownie/network-config.yaml
-`
+```
+
+* MacOS users:
+```console
+brew install gnu-sed
+
+gsed -i 's#https://polygon-mainnet.infura.io/v3/$WEB3_INFURA_PROJECT_ID#https://polygon-rpc.com/#g; s#https://polygon-mumbai.infura.io/v3/$WEB3_INFURA_PROJECT_ID#https://rpc-mumbai.maticvigil.com#g' ~/.brownie/network-config.yaml
+```
#### RPCs wrapup
|
[msgpack] look for `datetime.datetime` in keys also while packing
and `del` the old key if the encoded key is different from the original one. | @@ -224,6 +224,10 @@ class Serial(object):
def datetime_encoder(obj):
if isinstance(obj, dict):
for key, value in six.iteritems(obj.copy()):
+ encodedkey = datetime_encoder(key)
+ if key != encodedkey:
+ del obj[key]
+ key = encodedkey
obj[key] = datetime_encoder(value)
return dict(obj)
elif isinstance(obj, (list, tuple)):
|
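The point of the fix: when an encoded key differs from the original, the original entry must be deleted, otherwise the dict keeps both keys. A minimal sketch with a hypothetical encoder (not the Serial class above):

```python
import datetime

def encode_key(key):
    # hypothetical encoder: datetime keys become ISO strings
    return key.isoformat() if isinstance(key, datetime.datetime) else key

data = {datetime.datetime(2020, 1, 1): "value"}
for key, value in list(data.items()):
    new_key = encode_key(key)
    if new_key != key:
        del data[key]  # drop the original key, as in the patch
    data[new_key] = value

print(data)  # {'2020-01-01T00:00:00': 'value'}
```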
facts: fix deployments with different net interface names
Deployments where radosgws don't have the same names for the
network interface.
Closes: | block:
- name: set_fact _interface
set_fact:
- _interface: "{{ (radosgw_interface | replace('-', '_')) }}"
+ _interface: "{{ (hostvars[item]['radosgw_interface'] | replace('-', '_')) }}"
+ loop: "{{ groups.get(rgw_group_name, []) }}"
+ delegate_to: "{{ item }}"
+ delegate_facts: true
+ run_once: true
- name: set_fact _radosgw_address to radosgw_interface - ipv4
set_fact:
- _radosgw_address: "{{ hostvars[inventory_hostname]['ansible_facts'][_interface][ip_version]['address'] }}"
+ _radosgw_address: "{{ hostvars[item]['ansible_facts'][hostvars[item]['_interface']][ip_version]['address'] }}"
+ loop: "{{ groups.get(rgw_group_name, []) }}"
+ delegate_to: "{{ item }}"
+ delegate_facts: true
when: ip_version == 'ipv4'
- name: set_fact _radosgw_address to radosgw_interface - ipv6
set_fact:
- _radosgw_address: "{{ hostvars[inventory_hostname]['ansible_facts'][_interface][ip_version][0]['address'] | ansible.utils.ipwrap }}"
+ _radosgw_address: "{{ hostvars[item]['ansible_facts'][hostvars[item]['_interface']][ip_version][0]['address'] | ipwrap }}"
+ loop: "{{ groups.get(rgw_group_name, []) }}"
+ delegate_to: "{{ item }}"
+ delegate_facts: true
when: ip_version == 'ipv6'
- name: set_fact rgw_instances without rgw multisite
|
Avoid log warning when closing is underway (on purpose)
Related-Bug: | @@ -317,6 +317,7 @@ class ZookeeperJobBoard(base.NotifyingJobBoard):
self._emit_notifications = bool(emit_notifications)
self._connected = False
self._suspended = False
+ self._closing = False
self._last_states = collections.deque(maxlen=self.STATE_HISTORY_LENGTH)
def _try_emit(self, state, details):
@@ -748,6 +749,10 @@ class ZookeeperJobBoard(base.NotifyingJobBoard):
self._last_states.appendleft(state)
if state == k_states.KazooState.LOST:
self._connected = False
+ # When the client is itself closing itself down this will be
+ # triggered, but in that case we expect it, so we don't need
+ # to emit a warning message.
+ if not self._closing:
LOG.warning("Connection to zookeeper has been lost")
elif state == k_states.KazooState.SUSPENDED:
LOG.warning("Connection to zookeeper has been suspended")
@@ -790,6 +795,7 @@ class ZookeeperJobBoard(base.NotifyingJobBoard):
def close(self):
if self._owned:
LOG.debug("Stopping client")
+ self._closing = True
kazoo_utils.finalize_client(self._client)
if self._worker is not None:
LOG.debug("Shutting down the notifier")
@@ -818,6 +824,7 @@ class ZookeeperJobBoard(base.NotifyingJobBoard):
if timeout is not None:
timeout = float(timeout)
self._client.start(timeout=timeout)
+ self._closing = False
except (self._client.handler.timeout_exception,
k_exceptions.KazooException):
excp.raise_with_cause(excp.JobFailure,
|
Fix mocking time
When running on CentOS, the side_effect was returning a MagicMock
object instead of the intended int.
'Response Status': '201 Created',
'Errors': [],
})])
- mock_time.return_value.time.side_effect = (
+ mock_time.time.side_effect = (
1, # start_time
12, # first whitespace
13, # second...
@@ -769,7 +769,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
'Response Status': '400 Bad Request',
'Errors': [['some/object', '403 Forbidden']],
})])
- mock_time.return_value.time.side_effect = (
+ mock_time.time.side_effect = (
1, # start_time
12, # first whitespace
13, # second...
@@ -818,7 +818,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
'Response Status': '400 Bad Request',
'Errors': [['some/object', '404 Not Found']],
})])
- mock_time.return_value.time.side_effect = (
+ mock_time.time.side_effect = (
1, # start_time
12, # first whitespace
13, # second...
|
Rename methods
I renamed "basic variables / sets" to Pyomo-logic names,
since "basic variables" is a term used in LP solving (the simplex algorithm)
If this value is true, the set, variables, constraints, etc. are added,
automatically when instantiating the model. For sequential model
building process set this value to False
- and use methods `_add_basic_sets`, `_add_basic_variables`,
- `_add_blocks`, `_add_objective`
+ and use methods `_add_parent_block_sets`,
+ `_add_parent_block_variables`, `_add_blocks`, `_add_objective`
"""
CONSTRAINT_GROUPS = []
@@ -61,18 +61,18 @@ class BaseModel(po.ConcreteModel):
def _construct(self):
"""
"""
- self._add_basic_sets()
- self._add_basic_variables()
- self._add_blocks()
+ self._add_parent_block_sets()
+ self._add_parent_block_variables()
+ self._add_child_blocks()
self._add_objective()
- def _add_basic_sets(self):
+ def _add_parent_block_sets(self):
pass
- def _add_basic_variables(self):
+ def _add_parent_block_variables(self):
pass
- def _add_blocks(self):
+ def _add_child_blocks(self):
"""
"""
# loop over all constraint groups to add constraints to the model
@@ -231,7 +231,7 @@ class Model(BaseModel):
def __init__(self, energysystem, **kwargs):
super().__init__(energysystem, **kwargs)
- def _add_basic_sets(self):
+ def _add_parent_block_sets(self):
"""
"""
# set with all nodes
@@ -261,7 +261,7 @@ class Model(BaseModel):
hasattr(v, 'bidirectional')],
ordered=True, dimen=2, within=self.FLOWS)
- def _add_basic_variables(self):
+ def _add_parent_block_variables(self):
"""
"""
self.flow = po.Var(self.FLOWS, self.TIMESTEPS,
|
Update filtersets.md
Corrected typos on the page;
an issue report has also been submitted at
regards, | @@ -34,12 +34,12 @@ To utilize a filter set in a subclass of one of NetBox's generic views (such as
```python
# views.py
from netbox.views.generic import ObjectListView
-from .filtersets import MyModelFitlerSet
+from .filtersets import MyModelFilterSet
from .models import MyModel
class MyModelListView(ObjectListView):
queryset = MyModel.objects.all()
- filterset = MyModelFitlerSet
+ filterset = MyModelFilterSet
```
To enable a filter set on a REST API endpoint, set the `filterset_class` attribute on the API view:
|
Update unit-matrixwrapper.cc
Remove the constraint on the total number of bosons in the test. At the moment, Matrix Wrapper does not support symmetries, thus we should not use symmetries in the tests (such as total number conservation).
{"Graph",
{{"Name", "Hypercube"}, {"L", 2}, {"Dimension", 2}, {"Pbc", false}}},
{"Hamiltonian",
- {{"Name", "BoseHubbard"}, {"U", 4.0}, {"Nmax", 2}, {"Nbosons", 2}}}};
+ {{"Name", "BoseHubbard"}, {"U", 4.0}, {"Nmax", 2}}}};
input_tests.push_back(pars);
|
fix_inv_root.py edited online with Bitbucket
HG--
branch : fixes.fix_root.20170511 | @@ -38,7 +38,6 @@ def fix():
logging.info("Checking Loste&Found object")
lostfound_model = ObjectModel.objects.get(uuid="b0fae773-b214-4edf-be35-3468b53b03f2")
lf = Object.objects.filter(model=lostfound_model.id).count()
- print lf
if lf == 0:
# Create missed "Lost&Found"
logging.info(" ... creating missed Lost&Found")
@@ -70,6 +69,14 @@ def fix():
logging.info(" ... removing duplicated Lost&Found %s", l)
l.delete()
+ if Object.objects.get(name="Global Lost&Found").container != Object.objects.get(name="Root",model=root_model).id:
+ logging.info("Global Lost&Found object not valid container - fix") # fix
+ o = Object.objects.get(name="Global Lost&Found")
+ o.container = Object.objects.get(name="Root").id
+ o.save()
+ else:
+ logging.info("Global Lost&Found object container is valid")
+
|
ebuild.ebd: force verbose error output for all die() failures
And avoid irrelevant tracebacks for internal IPC errors. | @@ -473,12 +473,21 @@ def run_generic_phase(pkg, phase, env, userpriv, sandbox, fd_pipes=None,
if isinstance(e, ebd_ipc.IpcError):
# notify bash side of IPC error
ebd.write(e.ret)
- ebd.shutdown_processor()
- release_ebuild_processor(ebd)
if isinstance(e, ebd_ipc.IpcInternalError):
# show main exception cause for internal IPC errors
+ ebd.shutdown_processor(force=True)
raise e.__cause__
- elif isinstance(e, IGNORED_EXCEPTIONS + (ProcessorError, format.GenericBuildError,)):
+ try:
+ ebd.shutdown_processor()
+ except ProcessorError as pe:
+ # catch die errors during shutdown
+ e = pe
+ release_ebuild_processor(ebd)
+ if isinstance(e, ProcessorError):
+ # force verbose die output
+ e._verbosity = 1
+ raise e
+ elif isinstance(e, IGNORED_EXCEPTIONS + (format.GenericBuildError,)):
raise
raise format.GenericBuildError(
f"Executing phase {phase}: Caught exception: {e}") from e
|
fix(futures_roll_yield.py): fix get_roll_yield_bar interface
fix get_roll_yield_bar interface | @@ -118,7 +118,7 @@ def get_roll_yield_bar(
if type_method == "var":
df = pd.DataFrame()
- for market in ["dce", "cffex", "shfe", "czce"]:
+ for market in ["dce", "cffex", "shfe", "czce", "gfex"]:
df = pd.concat(
[
df,
@@ -173,9 +173,9 @@ def get_roll_yield_bar(
if __name__ == "__main__":
get_roll_yield_bar_range_df = get_roll_yield_bar(
type_method="date",
- var="CF",
+ var="IM",
start_day="20230101",
- end_day="20230104",
+ end_day="20230112",
)
print(get_roll_yield_bar_range_df)
|
new ExpressionBuilder.make_psg(), update .add_virtual_arg(), .add_state_arg()
ELinearElasticTerm also works in residual mode (but is still WIP)
from sfepy.base.base import output, Struct
from sfepy.base.timing import Timer
+from sfepy.mechanics.tensors import dim2sym
from sfepy.terms.terms import Term
from sfepy.terms import register_term
@@ -50,6 +51,21 @@ class ExpressionBuilder(Struct):
self.dofs_cache = dofs_cache
self.aux_letters = iter(self._aux_letters)
+ @staticmethod
+ def make_psg(dim):
+ sym = dim2sym(dim)
+ psg = nm.zeros((dim, dim, sym))
+ if dim == 3:
+ psg[0, [0,1,2], [0,3,4]] = 1
+ psg[1, [0,1,2], [3,1,5]] = 1
+ psg[2, [0,1,2], [4,5,2]] = 1
+
+ elif dim == 2:
+ psg[0, [0,1], [0,2]] = 1
+ psg[1, [0,1], [2,1]] = 1
+
+ return psg
+
def add_constant(self, val, name):
append_all(self.subscripts, 'cq')
append_all(self.operands, val)
@@ -113,10 +129,7 @@ class ExpressionBuilder(Struct):
else: # symmetric gradient
# if modifier == 's'....
- psg = nm.zeros((3, 3, 6))
- psg[0, [0,1,2], [0,3,4]] = 1
- psg[1, [0,1,2], [3,1,5]] = 1
- psg[2, [0,1,2], [4,5,2]] = 1
+ psg = self.make_psg(arg.dim)
self.add_psg(iic, ein, psg)
out_letters = iic + out_letters
@@ -135,8 +148,15 @@ class ExpressionBuilder(Struct):
out_letters = iin
if (diff_var != arg.name):
+ if ':' not in ein:
self.add_arg_dofs(iin, ein, arg)
+ else:
+ iic = next(self.aux_letters) # component
+ psg = self.make_psg(arg.dim)
+ self.add_psg(iic, ein, psg)
+ self.add_arg_dofs(iin, [iic], arg)
+
else:
if arg.n_components > 1:
iic = next(self.aux_letters) # component
@@ -144,10 +164,7 @@ class ExpressionBuilder(Struct):
ee = nm.eye(arg.n_components)
else:
- psg = nm.zeros((3, 3, 6))
- psg[0, [0,1,2], [0,3,4]] = 1
- psg[1, [0,1,2], [3,1,5]] = 1
- psg[2, [0,1,2], [4,5,2]] = 1
+ psg = self.make_psg(arg.dim)
out_letters = iic + out_letters
|
[TIR] Update ir_comparator message to be more clear about what is being compared
Update ir_comparator message to be more clear about what is being compared. This would be more useful when debugging tensorize mismatches. | @@ -41,7 +41,9 @@ class TensorIntrinMismatchError : public ScheduleError {
String DetailRenderTemplate() const final {
std::ostringstream os;
- os << "The stmt {0} doesn't match the tensor intrin\n " << rhs_stmt_;
+ os << "The stmt {0} doesn't match the tensor intrin\nThe pattern attempting to be matched:\n"
+ << lhs_stmt_ << "\nDoes not match the tensorize description:\n"
+ << rhs_stmt_;
for (const auto& msg : error_messages_) {
os << msg << std::endl;
}
|
Fixing a typo in prediction_heads docs
This PR fixes a small typo in the prediction heads docs, replacing 'flexivle' with 'flexible'. | @@ -12,7 +12,7 @@ We will take a look at our own new **model classes with flexible heads** (e.g. `
```eval_rst
.. important::
Although the two prediction head implementations serve the same use case, their weights are *not* directly compatible, i.e. you cannot load a head created with ``AutoModelWithHeds`` into a model of type ``AutoModelForSequenceClassification``.
- There is however an `automatic conversion to model classes with flexivle heads <#automatic-conversion>`_.
+ There is however an `automatic conversion to model classes with flexible heads <#automatic-conversion>`_.
```
## Models with flexible heads
|
Fix error in Saltstack's rest auth "Authentication module threw 'status' "
Fixes
updated rest.auth method to get 'status' and 'dict' in the
http.query result | @@ -58,7 +58,8 @@ def auth(username, password):
# Post to the API endpoint. If 200 is returned then the result will be the ACLs
# for this user
- result = salt.utils.http.query(url, method='POST', data=data)
+ result = salt.utils.http.query(url, method='POST', data=data, status=True,
+ decode=True)
if result['status'] == 200:
log.debug('eauth REST call returned 200: {0}'.format(result))
if result['dict'] is not None:
|
compose: Fix color of preview icon.
Fixes the color of the preview icon to match the other message-control-button icons.
a#markdown_preview {
margin-left: 2px;
- color: hsl(0, 0%, 47%);
}
a#undo_markdown_preview {
text-decoration: none;
position: relative;
font-size: 15px;
- color: hsl(0, 0%, 47%);
margin-left: 2px;
}
|
Fix CircleCI imports
Summary: Pull Request resolved: | @@ -4,10 +4,9 @@ import abc
import logging
from typing import List
-import torch
from reagent.core.observers import CompositeObserver
from reagent.core.tracker import Observer
-from reagent.oss_workflow.result_registries import TrainingReport
+from reagent.workflow.result_registries import TrainingReport
logger = logging.getLogger(__name__)
|
Assume yes if prompted to attempt to fix
* Assume yes if prompted to attempt to fix
References
* Be more verbose on input error | @@ -449,7 +449,7 @@ def fix(force, paths, bench=False, fixed_suffix="", logger=None, **kwargs):
)
c = click.getchar().lower()
click.echo("...")
- if c == "y":
+ if c in ("y", "\r", "\n"):
click.echo("Attempting fixes...")
# TODO: Remove verbose
success = do_fixes(
@@ -464,7 +464,7 @@ def fix(force, paths, bench=False, fixed_suffix="", logger=None, **kwargs):
elif c == "n":
click.echo("Aborting...")
else:
- click.echo("Invalid input :(")
+ click.echo("Invalid input, please enter 'Y' or 'N'")
click.echo("Aborting...")
else:
click.echo("==== no fixable linting violations found ====")
|
Change SOA detail
Change ptype for the time interval props from 'time' to 'int'
('soa:ns', {'ptype': 'inet:fqdn', 'doc': 'The domain (MNAME) returned in the SOA record', 'ro': 1}),
('soa:email', {'ptype': 'inet:email', 'doc': 'The normalized email address (RNAME) returned in the SOA record', 'ro': 1}),
('soa:serial', {'ptype': 'int', 'doc': 'The SERIAL value returned in the SOA record', 'ro': 1}),
- ('soa:refresh', {'ptype': 'time', 'doc': 'The REFRESH value returned in the SOA record', 'ro': 1}),
- ('soa:retry', {'ptype': 'time', 'doc': 'The RETRY value returned in the SOA record', 'ro': 1}),
- ('soa:expire', {'ptype': 'time', 'doc': 'The EXPIRE value returned in the SOA record', 'ro': 1}),
- ('soa:min', {'ptype': 'time', 'doc': 'The MINIMUM value returned in the SOA record', 'ro': 1}),
+ ('soa:refresh', {'ptype': 'int', 'doc': 'The REFRESH value returned in the SOA record', 'ro': 1}),
+ ('soa:retry', {'ptype': 'int', 'doc': 'The RETRY value returned in the SOA record', 'ro': 1}),
+ ('soa:expire', {'ptype': 'int', 'doc': 'The EXPIRE value returned in the SOA record', 'ro': 1}),
+ ('soa:min', {'ptype': 'int', 'doc': 'The MINIMUM value returned in the SOA record', 'ro': 1}),
('txt', {'ptype': 'inet:dns:txt', 'doc': 'The DNS TXT record returned by the lookup', 'ro': 1}),
('txt:fqdn', {'ptype': 'inet:fqdn', 'doc': 'The domain queried for its TXT record', 'ro': 1}),
|
zulip_tools.py: Add `get_environment()` function.
This function can be used to determine the environment in which a
script is being executed. | @@ -147,3 +147,11 @@ def log_management_command(cmd, log_path):
logger.setLevel(logging.INFO)
logger.info("Ran '%s'" % (cmd,))
+
+def get_environment():
+ # type: () -> Text
+ if os.path.exists(DEPLOYMENTS_DIR):
+ return "prod"
+ if os.environ.get("TRAVIS"):
+ return "travis"
+ return "dev"
|
treat SystemExit as normal exit
fixes | @@ -215,9 +215,9 @@ class StdoutLog(ContextLog):
super().__init__()
def _write_post_mortem(self, etype, value, tb):
- if etype is None:
+ if etype in (None, SystemExit):
return
- elif etype in (KeyboardInterrupt, SystemExit, bdb.BdbQuit):
+ elif etype in (KeyboardInterrupt, bdb.BdbQuit):
self.write('error', 'killed by user')
else:
try:
@@ -355,9 +355,9 @@ class HtmlLog(ContextTreeLog):
def _write_post_mortem(self, etype, value, tb):
'write exception nfo to html log'
- if etype is None:
+ if etype in (None, SystemExit):
return
- if etype in (KeyboardInterrupt, SystemExit, bdb.BdbQuit):
+ if etype in (KeyboardInterrupt, bdb.BdbQuit):
self.write('error', 'killed by user')
return
@@ -398,9 +398,9 @@ class IndentLog(ContextTreeLog):
super().__init__()
def _write_post_mortem(self, etype, value, tb):
- if etype is None:
+ if etype in (None, SystemExit):
return
- elif etype in (KeyboardInterrupt, SystemExit, bdb.BdbQuit):
+ elif etype in (KeyboardInterrupt, bdb.BdbQuit):
self.write('error', 'killed by user')
else:
try:
|
Update lfsr stuff slightly
Better commenting for how the LFSR works, and also just make the function generate values (no need to pass them in) | @@ -288,21 +288,34 @@ void StartRX(void const * argument){
// x^6 + x^5 + 1 with period 63
-static const uint8_t POLY_MASK = 0b0110000;
+static const uint8_t POLY_MASK = 0b00110000;
/**
- * @brief Updates the contents of the linear feedback shift register passed in.
- * At any given time, its contents will contain a psuedorandom sequence
- * which repeats after a period dependent on the polynomial structure.
- * @param lfsr Pointer to shift register contents. Holds output prn sequence
+ * @brief Generates a pseudo-random noise sequence based on a linear feedback
+ * shift register, which repeats after a period dependent on the
+ * polynomial structure.
+ * @return Pseudo-random noise byte
*/
-static inline void update_lfsr(uint8_t* lfsr){
- uint8_t stream_in = *lfsr & 1;
- *lfsr >>= 1;
-
- if(stream_in == 1){
- *lfsr ^= POLY_MASK;
+static inline uint8_t get_noise(void){
+ static uint8_t lfsr = 0x2F; // Seed for PRNG
+
+ uint8_t feedback_line = lfsr & 1;
+ lfsr >>= 1;
+
+ // For any binary digit A:
+ // A xor 0 = A
+ // A xor 1 = !A
+ //
+ // The 1's in the polynomial indicate bits that the feedback line is
+ // connected to via modulo-2 adders (i.e. xor). Given the above rules,
+ // we only need to compute this addition when the feedback line is a 1
+ // since there is no update to the lfsr contents (besides the shift)
+ // when the feedback line is 0
+ if(feedback_line == 1){
+ lfsr ^= POLY_MASK;
}
+
+ return lfsr;
}
@@ -311,16 +324,11 @@ void StartTX(void const * argument){
uint8_t buf[8] = {0xFF, 0xFF, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00};
Data_t data;
- int8_t prn = 0x2F; // seed value for PRNG
-
for(;;){
while(xQueueReceive(toBeSentQHandle, &data, portMAX_DELAY) != pdTRUE);
- // Generate new pseudo-random number
- update_lfsr(&prn);
-
buf[2] = data.id;
- buf[5] = (data.pos & 0xFF) + prn; // low byte with statistical noise
+ buf[5] = (data.pos & 0xFF) + get_noise(); // low byte + noise
buf[6] = (data.pos >> 8) & 0xFF; // high byte
buf[7] = ~sumBytes(buf, 6);
|
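For illustration, here is a small stand-alone Python re-creation of the same Galois LFSR update (mask `0b00110000`, seed `0x2F`), just to demonstrate the period-63 behaviour the firmware comment refers to. The helper name is made up for this sketch; it is not part of the firmware.

```python
POLY_MASK = 0b00110000  # same taps as the firmware's LFSR


def step(lfsr):
    # One Galois-LFSR step: shift right, and apply the tap mask
    # whenever the bit shifted out (the feedback line) is 1.
    feedback = lfsr & 1
    lfsr >>= 1
    if feedback:
        lfsr ^= POLY_MASK
    return lfsr


state = 0x2F                 # same seed as the firmware
states = []
for _ in range(126):         # run two full periods
    state = step(state)
    states.append(state)

assert states[:63] == states[63:]   # the sequence repeats every 63 steps
assert len(set(states[:63])) == 63  # visiting all 63 non-zero 6-bit states
print("period:", 63)
```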
Fix typo in intro document
Change 'adress' to 'address'.
-----------------------------
The Domain Name System (DNS) is a system for naming resources connected to a
-network, and works by storing various types of *record*, such as an IP adress
+network, and works by storing various types of *record*, such as an IP address
associated with a domain name. In practice, this is implemented by
*authoritative name servers* which contain these records and *resolvers* which
query name servers for records. Names are divided up into a hierarchy of zones,
|
Missed another ci->.ci
Should have been more systematic with my grepping. | @@ -20,22 +20,22 @@ pip install -e .
echo "--- Generate the signing key"
# Generate the server's signing key.
-python -m synapse.app.homeserver --generate-keys -c ci/sqlite-config.yaml
+python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts-dev/update_database --database-config ci/sqlite-config.yaml
+scripts-dev/update_database --database-config .ci/sqlite-config.yaml
# Create the PostgreSQL database.
-./ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against test database"
-coverage run scripts/synapse_port_db --sqlite-database ci/test_db.db --postgres-config ci/postgres-config.yaml
+coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
-coverage run scripts/synapse_port_db --sqlite-database ci/test_db.db --postgres-config ci/postgres-config.yaml
+coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
#####
@@ -44,14 +44,14 @@ coverage run scripts/synapse_port_db --sqlite-database ci/test_db.db --postgres-
echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
-rm ci/test_db.db
+rm .ci/test_db.db
-scripts-dev/update_database --database-config ci/sqlite-config.yaml
+scripts-dev/update_database --database-config .ci/sqlite-config.yaml
# re-create the PostgreSQL database.
-./ci/scripts/postgres_exec.py \
+.ci/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against empty database"
-coverage run scripts/synapse_port_db --sqlite-database ci/test_db.db --postgres-config ci/postgres-config.yaml
+coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
Added community tutorials to docs
With a Kafka starter | @@ -12,6 +12,13 @@ These projects from the community are developed on top of Channels:
* DjangoChannelsJsonRpc_, a wrapper for the JSON-RPC protocol.
* channels-demultiplexer_, a (de)multiplexer for ``AsyncJsonWebsocketConsumer`` consumers.
+Community Tutorials
+===================
+
+Here are some Channels tutorials from around the community:
+
+* kafka-integration_, a writeup on integrating Kafka with Channels.
+
If you'd like to add your project, please submit a PR with a link and brief description.
.. _Beatserver: https://github.com/rajasimon/beatserver
@@ -22,3 +29,4 @@ If you'd like to add your project, please submit a PR with a link and brief desc
.. _Apollo: https://github.com/maliesa96/apollo
.. _DjangoChannelsJsonRpc: https://github.com/millerf/django-channels2-jsonrpc
.. _channels-demultiplexer: https://github.com/csdenboer/channels-demultiplexer
+.. _kafka-integration: https://gist.github.com/CosmicReindeer/da071d027050cfe0a03df3b500f2f44b
|
Avoid echoing the 'Environment Variables:' string when no envvars exist
SIM: | @@ -130,7 +130,9 @@ def create_environment_variables_list(environment_variables, as_option_settings=
def print_environment_vars(environment_variables):
+ if environment_variables:
io.echo(' Environment Variables:')
+
for environment_variable, value in iteritems(environment_variables):
environment_variable, value = utils.mask_vars(environment_variable, value)
io.echo(' ', environment_variable, '=', value)
|
Update QRL setup instructions for raspberry pi.txt
added lines for installing blessings and statistics dependencies | @@ -47,6 +47,12 @@ sudo pip install leveldb
4.
sudo pip install Twisted==16.0.0 (you need version 16.0.0 or it won't work)
+5.
+sudo pip install blessings
+
+6.
+sudo pip install statistics
+
|
Update apt_unclassified.txt
> ```apt_bisonal``` by reason: | @@ -1564,13 +1564,3 @@ gridnetworking.net
# Reference: https://twitter.com/__0XYC__/status/1535107137441251328
t7g5c.app.link
-
-# Reference: https://twitter.com/h2jazi/status/1537536029250490382
-# Reference: https://www.virustotal.com/gui/ip-address/137.220.176.165/relations
-# Reference: https://www.virustotal.com/gui/ip-address/64.233.167.99/relations
-# Reference: https://www.virustotal.com/gui/file/7944fa9cbfef2c7d652f032edc159abeaa1fb4fd64143a8fe3b175095c4519f5/detection
-
-lingrevelat.com
-thresident.com
-upportteam.lingrevelat.com
-supportteam.lingrevelat.com
|
[4.0] remove Python 2 related code
also show a FutureWarning for deprecated classes
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
+import ctypes
from pywikibot.tools import ModuleDeprecationWrapper
-
-from pywikibot.userinterfaces import (
- terminal_interface_base,
- win32_unicode,
-)
-
-import ctypes
+from pywikibot.userinterfaces import terminal_interface_base, win32_unicode
windowsColors = {
'default': 7,
@@ -39,21 +33,21 @@ windowsColors = {
class Win32BaseUI(terminal_interface_base.UI):
- """DEPRECATED. User interface for Win32 terminals without ctypes."""
+ """DEPRECATED. User interface for Win32 terminals."""
def __init__(self):
"""Initializer."""
- super(Win32BaseUI, self).__init__()
+ super().__init__()
self.encoding = 'ascii'
class Win32UI(terminal_interface_base.UI):
- """User interface for Win32 terminals using ctypes."""
+ """User interface for Win32 terminals."""
def __init__(self):
"""Initializer."""
- super(Win32CtypesUI, self).__init__()
+ super().__init__()
(stdin, stdout, stderr, argv) = win32_unicode.get_unicode_console()
self.stdin = stdin
self.stdout = stdout
@@ -87,7 +81,7 @@ Win32CtypesUI = Win32UI
wrapper = ModuleDeprecationWrapper(__name__)
wrapper._add_deprecated_attr('Win32CtypesUI',
replacement_name='Win32UI',
- since='20190217')
+ since='20190217', future_warning=True)
wrapper._add_deprecated_attr('Win32BaseUI',
replacement_name='Win32UI',
- since='20190217')
+ since='20190217', future_warning=True)
|
Windows: Fix, could crash while scanning loaded modules during dependency scan
* It's not really clear what kind of module this was, but we shouldn't
care; this is only for finding DLLs that were not found, and
not-found DLLs won't help there.
result = OrderedDict()
for handle in _getWindowsRunningProcessModuleHandles():
+ try:
filename = getWindowsRunningProcessModuleFilename(handle)
+ except WindowsError:
+ continue
result[os.path.basename(filename)] = filename
|
langkit.lexer: minor refactoring
TN: | @@ -468,14 +468,12 @@ class Lexer(object):
literal, self.tokens.__name__
)
)
- if literal in self.literals_map:
- return self.literals_map[literal]
- else:
check_source_language(
- False,
- "{} token literal is not part of the valid tokens for "
- "this grammar".format(literal)
+ literal in self.literals_map,
+ '{} token literal is not part of the valid tokens for this'
+ ' this grammar'.format(literal)
)
+ return self.literals_map[literal]
@property
def sorted_tokens(self):
|
Updates Silence Lock
Modifies the lock on the silence command in order to choose between the ctx
and channel arg based on the input.
import logging
from contextlib import suppress
from datetime import datetime, timedelta, timezone
-from operator import attrgetter
from typing import Optional, Union
from async_rediscache import RedisCache
@@ -13,7 +12,7 @@ from discord.ext.commands import Context
from bot.bot import Bot
from bot.constants import Channels, Emojis, Guild, MODERATION_ROLES, Roles
from bot.converters import HushDurationConverter
-from bot.utils.lock import LockedResourceError, lock_arg
+from bot.utils.lock import LockedResourceError, lock, lock_arg
from bot.utils.scheduling import Scheduler
log = logging.getLogger(__name__)
@@ -137,8 +136,17 @@ class Silence(commands.Cog):
elif source_channel != target_channel:
await target_channel.send(message)
+ async def _select_lock_channel(*args) -> Union[TextChannel, VoiceChannel]:
+ """Passes the channel to be silenced to the resource lock."""
+ channel = args[0].get("channel")
+ if channel is not None:
+ return channel
+
+ else:
+ return args[0].get("ctx").channel
+
@commands.command(aliases=("hush",))
- @lock_arg(LOCK_NAMESPACE, "ctx", attrgetter("channel"), raise_error=True)
+ @lock(LOCK_NAMESPACE, _select_lock_channel, raise_error=True)
async def silence(
self, ctx: Context, duration: HushDurationConverter = 10, kick: bool = False,
*, channel: Union[TextChannel, VoiceChannel] = None
|
Update for JupyterLab 1.x
For JupyterLab version 1.0.0 and higher we no longer need to copy vpython_data files to the appropriate directory. This is now handled by the Jupyter labextension for vpython.
pass
else:
# We have jupyterlab, is it the right version?
- if jupyterlab.__version__ >= '0.35.0':
+ if (jupyterlab.__version__ >= '0.35.0') and (jupyterlab.__version__ < '1.0.0'):
from os.path import join
labextensions_dir = join(jupyterlab.commands.get_app_dir(), u'static')
try:
|
change default sample_pro
change the default sample_pro to ensure the script runs correctly
parser.add_argument(
'--learning_rate', type=float, default=0.001, help='Learning rate')
parser.add_argument(
- '--sample_pro', type=float, default=0.1, help='Sample probability for training data')
+ '--sample_pro', type=float, default=1, help='Sample probability for training data')
parser.add_argument(
'--max_len', type=int, default=50, help='Max length for sentences')
|
Update README.md
Deleted the Gitter badge
<a href="http://opsdroid.readthedocs.io/en/stable/?badge=stable"><img src="https://img.shields.io/readthedocs/opsdroid/latest.svg" alt="Documentation Status" /></a>
<a href="https://riot.im/app/#/room/#opsdroid-general:matrix.org"><img src="https://img.shields.io/matrix/opsdroid-general:matrix.org.svg?logo=matrix" alt="Matrix Chat" /></a>
-<a href="https://gitter.im/opsdroid"><img src="https://img.shields.io/badge/gitter-join%20chat-4fb896.svg" alt="Gitter Badge" /></a>
<a href="#backers"><img src="https://opencollective.com/opsdroid/backers/badge.svg" alt="Backers on Open Collective" /></a>
<a href="#sponsors"><img src="https://opencollective.com/opsdroid/sponsors/badge.svg" alt="Sponsors on Open Collective" /></a>
<a href="https://www.codetriage.com/opsdroid/opsdroid"><img src="https://www.codetriage.com/opsdroid/opsdroid/badges/users.svg" alt="Open Source Helpers" /></a>
|
Fix invalid link
Fix invalid link in get_started.md | @@ -15,7 +15,7 @@ The following tutorials demonstrates how to run ElasticDL on different environme
[Minikube](https://kubernetes.io/docs/setup/learning-environment/minikube/) is a tool that makes it easy to run Kubernetes locally.
It runs a single-node Kubernetes cluster inside a Virtual Machine (VM) on the laptop so developers can try out Kubernetes or develop with it day-to-day.
-[This tutorial](elastic_local.md) uses Minikube to run ElasticDL on a local laptop.
+[This tutorial](elasticdl_local.md) uses Minikube to run ElasticDL on a local laptop.
### ElasticDL on On-prem Cluster
|
Update README.md
Corrected syntax mistake - unneeded 'the'. | @@ -136,7 +136,7 @@ Please check out our [Troubleshooting guide](https://plotly.com/python/troublesh
### Static Image Export
plotly.py supports [static image export](https://plotly.com/python/static-image-export/),
-using the either the [`kaleido`](https://github.com/plotly/Kaleido)
+using either the [`kaleido`](https://github.com/plotly/Kaleido)
package (recommended, supported as of `plotly` version 4.9) or the [orca](https://github.com/plotly/orca)
command line utility (legacy as of `plotly` version 4.9).
|
docs: Add Markdown inline code marker around inline XML example.
Presently, this tag is not rendered --- by Gitiles, at least --- which
makes the example very confusing indeed.
Tested-by: Jashank Jeremy | @@ -267,7 +267,7 @@ Attribute `groups`: List of groups to which this project belongs,
whitespace or comma separated. All projects belong to the group
"all", and each project automatically belongs to a group of
its name:`name` and path:`path`. E.g. for
-<project name="monkeys" path="barrel-of"/>, that project
+`<project name="monkeys" path="barrel-of"/>`, that project
definition is implicitly in the following manifest groups:
default, name:monkeys, and path:barrel-of. If you place a project in the
group "notdefault", it will not be automatically downloaded by repo.
|
Document amplicon trimming better
See | @@ -462,3 +462,32 @@ a match of the 3' adapter, the string ``;2`` is added. If there are two rows, th
.. versionadded:: 3.4
Column 12 (revcomp flag) added
+
+
+.. _properly-paired-reads:
+
+Properly paired reads
+---------------------
+
+When reading paired-end files, Cutadapt checks whether the read names match.
+Only the part of the read name before the first space is considered. If the
+read name ends with ``1`` or ``2`` or ``3``, then that is also ignored. For example,
+two FASTQ headers that would be considered to denote properly paired reads are::
+
+ @my_read/1 a comment
+
+and::
+
+ @my_read/2 another comment
+
+This is an example for *improperly paired* read names::
+
+ @my_read/1;1
+
+and::
+
+ @my_read/2;1
+
+Since the ``1`` and ``2`` (and ``3``) are ignored only if the occur at the end of the read
+name, and since the ``;1`` is considered to be part of the read name, these
+reads will not be considered to be propely paired.
|
[MNT] update wheels to 3.10
Updates wheels to 3.10, adds version 3.10 to wheels matrix | @@ -14,7 +14,7 @@ jobs:
- uses: actions/setup-python@v2
with:
- python-version: '3.9'
+ python-version: '3.10'
- name: Build wheel
run: |
@@ -34,7 +34,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-20.04, macOS-10.15]
- python-version: [3.7, 3.8, 3.9]
+ python-version: [3.7, 3.8, 3.9, 3.10]
steps:
- uses: actions/checkout@v2
@@ -74,6 +74,16 @@ jobs:
python-version: 3.8
bitness: 64
platform_id: win_amd64
+ - os: windows-2019
+ python: 38
+ python-version: 3.9
+ bitness: 64
+ platform_id: win_amd64
+ - os: windows-2019
+ python: 38
+ python-version: 3.10
+ bitness: 64
+ platform_id: win_amd64
steps:
- uses: actions/checkout@v2
|