The records below share the following schema (column name, type, and observed value range):

| Column | Type | Observed values |
|---|---|---|
| `instance_id` | string | length 10 to 57 |
| `base_commit` | string | length 40 |
| `created_at` | date | 2014-04-30 14:58:36 to 2025-04-30 20:14:11 |
| `environment_setup_commit` | string | length 40 |
| `hints_text` | string | length 0 to 273k |
| `patch` | string | length 251 to 7.06M |
| `problem_statement` | string | length 11 to 52.5k |
| `repo` | string | length 7 to 53 |
| `test_patch` | string | length 231 to 997k |
| `meta` | dict | |
| `version` | string | 851 distinct values |
| `install_config` | dict | |
| `requirements` | string, nullable | length 93 to 34.2k |
| `environment` | string, nullable | length 760 to 20.5k |
| `FAIL_TO_PASS` | list | length 1 to 9.39k |
| `FAIL_TO_FAIL` | list | length 0 to 2.69k |
| `PASS_TO_PASS` | list | length 0 to 7.87k |
| `PASS_TO_FAIL` | list | length 0 to 192 |
| `license_name` | string | 55 distinct values |
| `__index_level_0__` | int64 | 0 to 21.4k |
| `before_filepaths` | list | length 1 to 105 |
| `after_filepaths` | list | length 1 to 105 |

Each record below lists its fields in this column order, separated by `|`.
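For orientation, a minimal sketch of loading a dataset with this schema through the Hugging Face `datasets` library and inspecting one record (the dataset path and split name are placeholders, not the real identifiers):

```python
from datasets import load_dataset

# "org/dataset-name" and the split name are placeholders for the real identifiers.
ds = load_dataset("org/dataset-name", split="train")

row = ds[0]
print(row["instance_id"], row["repo"], row["version"], row["license_name"])
print(len(row["FAIL_TO_PASS"]), "tests are expected to go from failing to passing")
```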
mozilla__bleach-263 | ec71f4f283ef12da5a56799a3ddb7a1bcb1eadd2 | 2017-03-07 01:19:28 | fa4cadc5b3dad924b6c1e82a284cb2bb7a94377f | diff --git a/CHANGES b/CHANGES
index 3ff2b78..79f56a9 100644
--- a/CHANGES
+++ b/CHANGES
@@ -25,6 +25,12 @@ Version 2.0 (in development)
Amongst other things, this version will add end tags even if the tag in
question is to be escaped.
+* ``bleach.clean`` and friends attribute callables now take three arguments:
+ tag, attribute name and attribute value. Previously they only took attribute
+ name and attribute value.
+
+ All attribute callables will need to be updated.
+
* ``bleach.linkify`` was rewritten
``linkify`` was reimplemented as an html5lib Filter. As such, it no longer
@@ -52,6 +58,8 @@ Version 2.0 (in development)
don't then html5lib will raise an assertion error that the value is not
unicode.
+ All linkify filters will need to be updated.
+
**Changes**
* Supports Python 3.6.
@@ -68,6 +76,8 @@ Version 2.0 (in development)
you can pass as a filter to ``bleach.sanitizer.Cleaner`` allowing you to clean
and linkify in one pass.
+* ``bleach.clean`` and friends can now take a callable as an attributes arg value.
+
* Tons of bug fixes.
* Cleaned up tests.
diff --git a/bleach/sanitizer.py b/bleach/sanitizer.py
index 06c9066..1223e79 100644
--- a/bleach/sanitizer.py
+++ b/bleach/sanitizer.py
@@ -128,7 +128,7 @@ class Cleaner(object):
source=self.walker(dom),
# Bleach-sanitizer-specific things
- allowed_attributes_map=self.attributes,
+ attributes=self.attributes,
strip_disallowed_elements=self.strip,
strip_html_comments=self.strip_comments,
@@ -146,22 +146,58 @@ class Cleaner(object):
return self.serializer.render(filtered)
+def attribute_filter_factory(attributes):
+ """Generates attribute filter function for the given attributes value
+
+ The attributes value can take one of several shapes. This returns a filter
+ function appropriate to the attributes value. One nice thing about this is
+ that there's less if/then shenanigans in the ``allow_token`` method.
+
+ """
+ if callable(attributes):
+ return attributes
+
+ if isinstance(attributes, dict):
+ def _attr_filter(tag, attr, value):
+ if tag in attributes:
+ attr_val = attributes[tag]
+ if callable(attr_val):
+ return attr_val(tag, attr, value)
+
+ if attr in attr_val:
+ return True
+
+ if '*' in attributes:
+ attr_val = attributes['*']
+ if callable(attr_val):
+ return attr_val(tag, attr, value)
+
+ return attr in attr_val
+
+ return False
+
+ return _attr_filter
+
+ if isinstance(attributes, list):
+ def _attr_filter(tag, attr, value):
+ return attr in attributes
+
+ return _attr_filter
+
+ raise ValueError('attributes needs to be a callable, a list or a dict')
+
+
class BleachSanitizerFilter(sanitizer.Filter):
"""html5lib Filter that sanitizes text
This filter can be used anywhere html5lib filters can be used.
"""
- def __init__(self, source, allowed_attributes_map,
+ def __init__(self, source, attributes=ALLOWED_ATTRIBUTES,
strip_disallowed_elements=False, strip_html_comments=True,
**kwargs):
- if isinstance(allowed_attributes_map, dict):
- self.wildcard_attributes = allowed_attributes_map.get('*', [])
- self.allowed_attributes_map = allowed_attributes_map
- else:
- self.wildcard_attributes = allowed_attributes_map
- self.allowed_attributes_map = {}
+ self.attr_filter = attribute_filter_factory(attributes)
self.strip_disallowed_elements = strip_disallowed_elements
self.strip_html_comments = strip_html_comments
@@ -205,10 +241,6 @@ class BleachSanitizerFilter(sanitizer.Filter):
def allow_token(self, token):
"""Handles the case where we're allowing the tag"""
if 'data' in token:
- allowed_attributes = self.allowed_attributes_map.get(token['name'], [])
- if not callable(allowed_attributes):
- allowed_attributes += self.wildcard_attributes
-
# Loop through all the attributes and drop the ones that are not
# allowed, are unsafe or break other rules. Additionally, fix
# attribute values that need fixing.
@@ -220,11 +252,10 @@ class BleachSanitizerFilter(sanitizer.Filter):
namespace, name = namespaced_name
# Drop attributes that are not explicitly allowed
- if callable(allowed_attributes):
- if not allowed_attributes(name, val):
- continue
-
- elif name not in allowed_attributes:
+ #
+ # NOTE(willkg): We pass in the attribute name--not a namespaced
+ # name.
+ if not self.attr_filter(token['name'], name, val):
continue
# Look at attributes that have uri values
diff --git a/docs/clean.rst b/docs/clean.rst
index 161e435..b02c452 100644
--- a/docs/clean.rst
+++ b/docs/clean.rst
@@ -55,8 +55,8 @@ The default value is also a conservative dict found in
As a list
---------
-The ``attributes`` value can be a list, in which case the attributes are allowed
-for any tag.
+The ``attributes`` value can be a list which specifies the list of attributes
+allowed for any tag.
For example:
@@ -76,10 +76,12 @@ For example:
As a dict
---------
-The ``attributes`` value can be a dict, in which case the keys are tag names (or
-a wildcard: ``*`` for all tags) and the values are lists of allowed attributes.
+The ``attributes`` value can be a dict which maps tags to what attributes they can have.
-For example:
+You can also specify ``*``, which will match any tag.
+
+For example, this allows "href" and "rel" for "a" tags, "alt" for the "img" tag
+and "class" for any tag (including "a" and "img"):
.. doctest::
@@ -99,48 +101,66 @@ For example:
u'<img alt="an example">'
-In this case, ``class`` is allowed on any allowed element (from the ``tags``
-argument), ``<a>`` tags are allowed to have ``href`` and ``rel`` attributes,
-and so on.
-
-
Using functions
---------------
-You can also use callables. If the callable returns ``True``, the attribute is
-allowed. Otherwise, it is stripped. For example:
+You can also use callables that take the tag, attribute name and attribute value
+and returns ``True`` to keep the attribute or ``False`` to drop it.
+
+You can pass a callable as the attributes argument value and it'll run for
+every tag/attr.
+
+For example:
+
+.. doctest::
+
+ >>> import bleach
+
+ >>> def allow_h(tag, name, value):
+ ... return name[0] == 'h'
+
+ >>> bleach.clean(
+ ... u'<a href="http://example.com" title="link">link</a>',
+ ... tags=['a'],
+ ... attributes=allow_h,
+ ... )
+ u'<a href="http://example.com">link</a>'
+
+
+You can also pass a callable as a value in an attributes dict and it'll run for
+attributes for specified tags:
.. doctest::
- >>> from urlparse import urlparse
- >>> import bleach
+ >>> from urlparse import urlparse
+ >>> import bleach
- >>> def allow_src(name, value):
- ... if name in ('alt', 'height', 'width'):
- ... return True
- ... if name == 'src':
- ... p = urlparse(value)
- ... return (not p.netloc) or p.netloc == 'mydomain.com'
- ... return False
+ >>> def allow_src(tag, name, value):
+ ... if name in ('alt', 'height', 'width'):
+ ... return True
+ ... if name == 'src':
+ ... p = urlparse(value)
+ ... return (not p.netloc) or p.netloc == 'mydomain.com'
+ ... return False
- >>> bleach.clean(
- ... u'<img src="http://example.com" alt="an example">',
- ... tags=['img'],
- ... attributes={
- ... 'img': allow_src
- ... }
- ... )
- u'<img alt="an example">'
+ >>> bleach.clean(
+ ... u'<img src="http://example.com" alt="an example">',
+ ... tags=['img'],
+ ... attributes={
+ ... 'img': allow_src
+ ... }
+ ... )
+ u'<img alt="an example">'
Allowed styles (``styles``)
===========================
-If you allow the ``style`` attribute, you will also need to whitelist styles
-users are allowed to set, for example ``color`` and ``background-color``.
+If you allow the ``style`` attribute, you will also need to specify the allowed
+styles users are allowed to set, for example ``color`` and ``background-color``.
-The default value is an empty list, i.e., the ``style`` attribute will be
-allowed but no values will be.
+The default value is an empty list. In other words, the ``style`` attribute will
+be allowed but no style declaration names will be allowed.
For example, to allow users to set the color and font-weight of text:
@@ -205,8 +225,8 @@ Default protocols are in ``bleach.ALLOWED_PROTOCOLS``.
Stripping markup (``strip``)
============================
-By default, Bleach *escapes* tags that aren't specified in the tags
-whitelist and invalid markup. For example:
+By default, Bleach *escapes* tags that aren't specified in the allowed tags list
+and invalid markup. For example:
.. doctest::
| Improved callable filter support
This is perhaps a bad title, so let me explain the problem.
As mentioned in an earlier ticket, I needed to support the "data-*" namespace on attributes, and the suggested route is callable filters.
The two main problems are as follows:
- `*` doesn't accept callable filters; based on the current setup, it requires an iterable.
- Specific tags can have a callable filter OR a list of attributes, not both.
Both of these are invalid:
```
macro_attributes_ok = {
'*': allowable_macro_attributes,
'a' : ['href', 'title', 'target', 'rel', allowable_macro_attributes],
}
```
The callable filters, as-is, must be implemented with a dedicated function per tag. This gets messy, since a lot of near-duplicate code must be written:
```
macro_attributes_ok = {
'a' : allowable_attributes_a,
'div' : allowable_attributes_div,
'span' : allowable_attributes_span,
}
```
Approaches that could be nicer / easier to deal with:
- Allow callables to be included within a list of text attributes:
```
macro_attributes_ok = {
'a' : [ 'href', 'link', allowable_attributes_setA ],
'div' : [ 'class', 'style', allowable_attributes_setA ],
}
```
- Pass the tag's name in as a kwarg to the callable. This would allow callables to reflect the current tag, and it would be backwards compatible:
```
def filter_src(name, value, tag=None):
if tag != 'img':
return False
if name in ('alt', 'height', 'width'):
return True
```
- The `attributes` kwarg to `clean` could itself be a callable, one that expects a (tag, attribute, value) triplet:
```python
def validate_attribute(tag, attribute, value):
    return True

clean(text, attributes=validate_attribute)
```
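A minimal sketch of the three-argument callable form that the patch above introduces (the sample HTML and the attribute policy are illustrative; it assumes the Bleach 2.0 API from this change):

```python
import bleach

def allow_data_attrs(tag, name, value):
    # Keep any data-* attribute, plus href on links; drop everything else.
    return name.startswith('data-') or (tag == 'a' and name == 'href')

cleaned = bleach.clean(
    '<a href="/x" data-id="7" onclick="evil()">x</a>',
    tags=['a'],
    attributes=allow_data_attrs,
)
# onclick is dropped; href and data-id survive.
```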
| mozilla/bleach | diff --git a/tests/test_basics.py b/tests/test_basics.py
index 031ab66..5b59ebf 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -164,23 +164,46 @@ class TestClean:
clean = '<em class="FOO">BAR</em>'
assert bleach.clean(dirty, attributes=['class']) == clean
- def test_wildcard_attributes(self):
+ def test_attributes_callable(self):
+ """Verify attributes can take a callable"""
+ ATTRS = lambda tag, name, val: name == 'title'
+ TAGS = ['a']
+
+ assert (
+ bleach.clean(u'<a href="/foo" title="blah">example</a>', tags=TAGS, attributes=ATTRS) ==
+ u'<a title="blah">example</a>'
+ )
+
+ def test_attributes_wildcard(self):
+ """Verify attributes[*] works"""
ATTRS = {
'*': ['id'],
'img': ['src'],
}
- TAG = ['img', 'em']
+ TAGS = ['img', 'em']
dirty = ('both <em id="foo" style="color: black">can</em> have '
'<img id="bar" src="foo"/>')
assert (
- bleach.clean(dirty, tags=TAG, attributes=ATTRS) ==
+ bleach.clean(dirty, tags=TAGS, attributes=ATTRS) ==
'both <em id="foo">can</em> have <img id="bar" src="foo">'
)
- def test_callable_attributes(self):
- """Verify callable attributes work and get correct arg values"""
- def img_test(attr, val):
- return attr == 'src' and val.startswith('https')
+ def test_attributes_wildcard_callable(self):
+ """Verify attributes[*] callable works"""
+ ATTRS = {
+ '*': lambda tag, name, val: name == 'title'
+ }
+ TAGS = ['a']
+
+ assert (
+ bleach.clean(u'<a href="/foo" title="blah">example</a>', tags=TAGS, attributes=ATTRS) ==
+ u'<a title="blah">example</a>'
+ )
+
+ def test_attributes_tag_callable(self):
+ """Verify attributes[tag] callable works"""
+ def img_test(tag, name, val):
+ return name == 'src' and val.startswith('https')
ATTRS = {
'img': img_test,
@@ -198,6 +221,28 @@ class TestClean:
u'foo <img src="https://example.com"> baz'
)
+ def test_attributes_tag_list(self):
+ """Verify attributes[tag] list works"""
+ ATTRS = {
+ 'a': ['title']
+ }
+ TAGS = ['a']
+
+ assert (
+ bleach.clean(u'<a href="/foo" title="blah">example</a>', tags=TAGS, attributes=ATTRS) ==
+ u'<a title="blah">example</a>'
+ )
+
+ def test_attributes_list(self):
+ """Verify attributes list works"""
+ ATTRS = ['title']
+ TAGS = ['a']
+
+ assert (
+ bleach.clean(u'<a href="/foo" title="blah">example</a>', tags=TAGS, attributes=ATTRS) ==
+ u'<a title="blah">example</a>'
+ )
+
def test_svg_attr_val_allows_ref(self):
"""Unescape values in svg attrs that allow url references"""
# Local IRI, so keep it
diff --git a/tests/test_security.py b/tests/test_security.py
index 2aac020..da0fe92 100644
--- a/tests/test_security.py
+++ b/tests/test_security.py
@@ -75,7 +75,7 @@ def test_invalid_href_attr():
def test_invalid_filter_attr():
IMG = ['img', ]
IMG_ATTR = {
- 'img': lambda attr, val: attr == 'src' and val == "http://example.com/"
+ 'img': lambda tag, name, val: name == 'src' and val == "http://example.com/"
}
assert (
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest -v"
} | alabaster==0.7.13
args==0.1.0
Babel==2.11.0
-e git+https://github.com/mozilla/bleach.git@ec71f4f283ef12da5a56799a3ddb7a1bcb1eadd2#egg=bleach
certifi==2021.5.30
charset-normalizer==2.0.12
clint==0.5.1
distlib==0.3.9
docutils==0.18.1
filelock==3.4.1
flake8==3.3.0
html5lib==1.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.6.1
pkginfo==1.10.0
platformdirs==2.4.0
pluggy==0.4.0
py==1.11.0
pycodestyle==2.3.1
pyflakes==1.5.0
Pygments==2.14.0
pytest==3.0.6
pytest-wholenodeid==0.2
pytz==2025.2
requests==2.27.1
requests-toolbelt==1.0.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==1.5.2
tox==2.4.1
twine==1.8.1
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
webencodings==0.5.1
zipp==3.6.0
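The `install_config` block above is effectively a recipe for rebuilding the instance's test environment whose resolved packages are listed in `requirements`. A minimal sketch of expanding such a dict into the commands a harness might run (the helper name and exact field handling are assumptions, not part of the dataset spec):

```python
def setup_commands(install_config):
    """Expand an install_config dict into an ordered list of shell commands."""
    cmds = list(install_config.get("pre_install") or [])       # e.g. "apt-get install -y gcc"
    for reqs in install_config.get("reqs_path") or []:          # e.g. "requirements.txt"
        cmds.append("pip install -r {}".format(reqs))
    if install_config.get("install"):                           # e.g. "pip install -e ."
        cmds.append(install_config["install"])
    cmds.append(install_config.get("test_cmd", "pytest"))       # e.g. "pytest -v"
    return cmds
```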
| name: bleach
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- args==0.1.0
- babel==2.11.0
- charset-normalizer==2.0.12
- clint==0.5.1
- distlib==0.3.9
- docutils==0.18.1
- filelock==3.4.1
- flake8==3.3.0
- html5lib==1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.6.1
- pkginfo==1.10.0
- platformdirs==2.4.0
- pluggy==0.4.0
- py==1.11.0
- pycodestyle==2.3.1
- pyflakes==1.5.0
- pygments==2.14.0
- pytest==3.0.6
- pytest-wholenodeid==0.2
- pytz==2025.2
- requests==2.27.1
- requests-toolbelt==1.0.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==1.5.2
- tox==2.4.1
- twine==1.8.1
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/bleach
| [
"tests/test_basics.py::TestClean::test_attributes_callable",
"tests/test_basics.py::TestClean::test_attributes_wildcard_callable",
"tests/test_basics.py::TestClean::test_attributes_tag_callable",
"tests/test_security.py::test_invalid_filter_attr"
]
| []
| [
"tests/test_basics.py::TestClean::test_empty",
"tests/test_basics.py::TestClean::test_nbsp",
"tests/test_basics.py::TestClean::test_comments_only",
"tests/test_basics.py::TestClean::test_with_comments",
"tests/test_basics.py::TestClean::test_no_html",
"tests/test_basics.py::TestClean::test_allowed_html",
"tests/test_basics.py::TestClean::test_bad_html",
"tests/test_basics.py::TestClean::test_function_arguments",
"tests/test_basics.py::TestClean::test_named_arguments",
"tests/test_basics.py::TestClean::test_disallowed_html",
"tests/test_basics.py::TestClean::test_bad_href",
"tests/test_basics.py::TestClean::test_bare_entities",
"tests/test_basics.py::TestClean::test_escaped_entities",
"tests/test_basics.py::TestClean::test_weird_strings",
"tests/test_basics.py::TestClean::test_stripping",
"tests/test_basics.py::TestClean::test_allowed_styles",
"tests/test_basics.py::TestClean::test_lowercase_html",
"tests/test_basics.py::TestClean::test_attributes_wildcard",
"tests/test_basics.py::TestClean::test_attributes_tag_list",
"tests/test_basics.py::TestClean::test_attributes_list",
"tests/test_basics.py::TestClean::test_svg_attr_val_allows_ref",
"tests/test_basics.py::TestClean::test_user_defined_protocols_valid",
"tests/test_basics.py::TestClean::test_user_defined_protocols_invalid",
"tests/test_basics.py::TestClean::test_filters",
"tests/test_basics.py::test_clean_idempotent",
"tests/test_basics.py::TestCleaner::test_basics",
"tests/test_security.py::test_nested_script_tag",
"tests/test_security.py::test_nested_script_tag_r",
"tests/test_security.py::test_invalid_attr",
"tests/test_security.py::test_unquoted_attr",
"tests/test_security.py::test_unquoted_event_handler",
"tests/test_security.py::test_invalid_attr_value",
"tests/test_security.py::test_invalid_href_attr",
"tests/test_security.py::test_invalid_tag_char",
"tests/test_security.py::test_unclosed_tag",
"tests/test_security.py::test_strip",
"tests/test_security.py::test_poster_attribute",
"tests/test_security.py::test_feed_protocol",
"tests/test_security.py::test_regressions[/bleach/tests/data/1.test->\"><script>alert(\"XSS\")</script>&\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/2.test-\"><STYLE>@import\"javascript:alert('XSS')\";</STYLE>\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/3.test->\"'><img%20src%3D%26%23x6a;%26%23x61;%26%23x76;%26%23x61;%26%23x73;%26%23x63;%26%23x72;%26%23x69;%26%23x70;%26%23x74;%26%23x3a;alert(%26quot;%26%23x20;XSS%26%23x20;Test%26%23x20;Successful%26quot;)>\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/5.test->%22%27><img%20src%3d%22javascript:alert(%27%20XSS%27)%22>\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/7.test-\">\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/8.test->\"\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/9.test-'';!--\"<XSS>=&{()}\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/14.test-<IMGSRC=java&<WBR>#115;crip&<WBR>#116;:a\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/15.test-le&<WBR>#114;t('XS<WBR>;S')>\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/16.test-<IMGSRC=ja&<WBR>#0000118as&<WBR>#0000099ri&<WBR>#0000112t:&<WBR>#0000097le&<WBR>#0000114t(&<WBR>#0000039XS&<WBR>#0000083')>\\n]",
"tests/test_security.py::test_regressions[/bleach/tests/data/17.test-<IMGSRC=javas&<WBR>#x63ript:&<WBR>#x61lert(&<WBR>#x27XSS')>\\n]",
"tests/test_security.py::test_regression_manually"
]
| []
| Apache License 2.0 | 1,064 | [
"bleach/sanitizer.py",
"CHANGES",
"docs/clean.rst"
]
| [
"bleach/sanitizer.py",
"CHANGES",
"docs/clean.rst"
]
|
|
mozilla__bleach-265 | fa4cadc5b3dad924b6c1e82a284cb2bb7a94377f | 2017-03-07 02:52:20 | fa4cadc5b3dad924b6c1e82a284cb2bb7a94377f | diff --git a/CHANGES b/CHANGES
index 79f56a9..050f4fc 100644
--- a/CHANGES
+++ b/CHANGES
@@ -60,6 +60,23 @@ Version 2.0 (in development)
All linkify filters will need to be updated.
+* ``bleach.linkify`` and friends had a ``skip_pre`` argument--that's been
+ replaced with a more general ``skip_tags`` argument.
+
+ Before, you might do::
+
+ bleach.linkify(some_text, skip_pre=True)
+
+ The equivalent with Bleach 2.0 is::
+
+ bleach.linkify(some_text, skip_tags=['pre'])
+
+ You can skip other tags, too, like ``style`` or ``script`` or other places
+ where you don't want linkification happening.
+
+ All uses of linkify that use ``skip_pre`` will need to be updated.
+
+
**Changes**
* Supports Python 3.6.
diff --git a/README.rst b/README.rst
index 403ff9b..08dd886 100644
--- a/README.rst
+++ b/README.rst
@@ -8,7 +8,7 @@ Bleach
.. image:: https://badge.fury.io/py/bleach.svg
:target: http://badge.fury.io/py/bleach
-Bleach is a whitelist-based HTML sanitizing library that escapes or strips
+Bleach is a allowed-list-based HTML sanitizing library that escapes or strips
markup and attributes.
Bleach can also linkify text safely, applying filters that Django's ``urlize``
diff --git a/bleach/__init__.py b/bleach/__init__.py
index 07b5075..a231f13 100644
--- a/bleach/__init__.py
+++ b/bleach/__init__.py
@@ -47,16 +47,16 @@ def clean(text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
:arg str text: the text to clean
- :arg list tags: whitelist of allowed tags; defaults to
+ :arg list tags: allowed list of tags; defaults to
``bleach.ALLOWED_TAGS``
- :arg dict attributes: whitelist of allowed attributes; defaults to
- ``bleach.ALLOWED_ATTRIBUTES``
+ :arg dict attributes: allowed attributes; can be a callable, list or dict;
+ defaults to ``bleach.ALLOWED_ATTRIBUTES``
- :arg list styles: whitelist of allowed css; defaults to
+ :arg list styles: allowed list of css styles; defaults to
``bleach.ALLOWED_STYLES``
- :arg list protocols: whitelist of allowed protocols for links; defaults
+ :arg list protocols: allowed list of protocols for links; defaults
to ``bleach.ALLOWED_PROTOCOLS``
:arg bool strip: whether or not to strip disallowed elements
@@ -77,7 +77,7 @@ def clean(text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
return cleaner.clean(text)
-def linkify(text, callbacks=DEFAULT_CALLBACKS, skip_pre=False, parse_email=False):
+def linkify(text, callbacks=DEFAULT_CALLBACKS, skip_tags=None, parse_email=False):
"""Convert URL-like strings in an HTML fragment to links
This function converts strings that look like URLs, domain names and email
@@ -106,7 +106,9 @@ def linkify(text, callbacks=DEFAULT_CALLBACKS, skip_pre=False, parse_email=False
:arg list callbacks: list of callbacks to run when adjusting tag attributes
- :arg bool skip_pre: whether or not to skip linkifying text in a ``pre`` tag
+ :arg list skip_tags: list of tags that you don't want to linkify the
+ contents of; for example, you could set this to ``['pre']`` to skip
+ linkifying contents of ``pre`` tags
:arg bool parse_email: whether or not to linkify email addresses
@@ -115,7 +117,7 @@ def linkify(text, callbacks=DEFAULT_CALLBACKS, skip_pre=False, parse_email=False
"""
linker = Linker(
callbacks=callbacks,
- skip_pre=skip_pre,
+ skip_tags=skip_tags,
parse_email=parse_email
)
return linker.linkify(text)
diff --git a/bleach/linkifier.py b/bleach/linkifier.py
index 92351be..6103e81 100644
--- a/bleach/linkifier.py
+++ b/bleach/linkifier.py
@@ -74,10 +74,40 @@ EMAIL_RE = re.compile(
class Linker(object):
- def __init__(self, callbacks=DEFAULT_CALLBACKS, skip_pre=False, parse_email=False,
+ """Convert URL-like strings in an HTML fragment to links
+
+ This function converts strings that look like URLs, domain names and email
+ addresses in text that may be an HTML fragment to links, while preserving:
+
+ 1. links already in the string
+ 2. urls found in attributes
+ 3. email addresses
+
+ linkify does a best-effort approach and tries to recover from bad
+ situations due to crazy text.
+
+ """
+ def __init__(self, callbacks=DEFAULT_CALLBACKS, skip_tags=None, parse_email=False,
url_re=URL_RE, email_re=EMAIL_RE):
+ """Creates a Linker instance
+
+ :arg list callbacks: list of callbacks to run when adjusting tag attributes
+
+ :arg list skip_tags: list of tags that you don't want to linkify the
+ contents of; for example, you could set this to ``['pre']`` to skip
+ linkifying contents of ``pre`` tags
+
+ :arg bool parse_email: whether or not to linkify email addresses
+
+ :arg re url_re: url matching regex
+
+ :arg email_re: email matching regex
+
+ :returns: linkified text as unicode
+
+ """
self.callbacks = callbacks
- self.skip_pre = skip_pre
+ self.skip_tags = skip_tags
self.parse_email = parse_email
self.url_re = url_re
self.email_re = email_re
@@ -105,7 +135,7 @@ class Linker(object):
filtered = LinkifyFilter(
source=self.walker(dom),
callbacks=self.callbacks,
- skip_pre=self.skip_pre,
+ skip_tags=self.skip_tags,
parse_email=self.parse_email,
url_re=self.url_re,
email_re=self.email_re,
@@ -126,12 +156,31 @@ class LinkifyFilter(Filter):
This filter can be used anywhere html5lib filters can be used.
"""
- def __init__(self, source, callbacks=None, skip_pre=False, parse_email=False,
+ def __init__(self, source, callbacks=None, skip_tags=None, parse_email=False,
url_re=URL_RE, email_re=EMAIL_RE):
+ """Creates a LinkifyFilter instance
+
+ :arg TreeWalker source: stream
+
+ :arg list callbacks: list of callbacks to run when adjusting tag attributes
+
+ :arg list skip_tags: list of tags that you don't want to linkify the
+ contents of; for example, you could set this to ``['pre']`` to skip
+ linkifying contents of ``pre`` tags
+
+ :arg bool parse_email: whether or not to linkify email addresses
+
+ :arg re url_re: url matching regex
+
+ :arg email_re: email matching regex
+
+ :returns: linkified text as unicode
+
+ """
super(LinkifyFilter, self).__init__(source)
self.callbacks = callbacks or []
- self.skip_pre = skip_pre
+ self.skip_tags = skip_tags or []
self.parse_email = parse_email
self.url_re = url_re
@@ -140,9 +189,15 @@ class LinkifyFilter(Filter):
def apply_callbacks(self, attrs, is_new):
"""Given an attrs dict and an is_new bool, runs through callbacks
- Callbacks can return an adjusted attrs dict or None. In the case of
- None, we stop going through callbacks and return that and the link gets
- dropped.
+ Callbacks can return an adjusted attrs dict or ``None``. In the case of
+ ``None``, we stop going through callbacks and return that and the link
+ gets dropped.
+
+ :arg dict attrs: map of ``(namespace, name)`` -> ``value``
+
+ :arg bool is_new: whether or not this link was added by linkify
+
+ :returns: adjusted attrs dict or ``None``
"""
for cb in self.callbacks:
@@ -399,7 +454,7 @@ class LinkifyFilter(Filter):
def __iter__(self):
in_a = False
- in_pre = False
+ in_skip_tag = None
token_buffer = []
@@ -425,10 +480,10 @@ class LinkifyFilter(Filter):
continue
elif token['type'] in ['StartTag', 'EmptyTag']:
- if token['name'] == 'pre' and self.skip_pre:
- # The "pre" tag starts a "special mode" where we don't linkify
- # anything.
- in_pre = True
+ if token['name'] in self.skip_tags:
+ # Skip tags start a "special mode" where we don't linkify
+ # anything until the end tag.
+ in_skip_tag = token['name']
elif token['name'] == 'a':
# The "a" tag is special--we switch to a slurp mode and
@@ -441,13 +496,13 @@ class LinkifyFilter(Filter):
# yet
continue
- elif in_pre and self.skip_pre:
+ elif in_skip_tag and self.skip_tags:
# NOTE(willkg): We put this clause here since in_a and
# switching in and out of in_a takes precedence.
- if token['type'] == 'EndTag' and token['name'] == 'pre':
- in_pre = False
+ if token['type'] == 'EndTag' and token['name'] == in_skip_tag:
+ in_skip_tag = None
- elif not in_a and not in_pre and token['type'] == 'Characters':
+ elif not in_a and not in_skip_tag and token['type'] == 'Characters':
new_stream = iter([token])
if self.parse_email:
new_stream = self.handle_email_addresses(new_stream)
diff --git a/bleach/sanitizer.py b/bleach/sanitizer.py
index 1223e79..b5c2fe9 100644
--- a/bleach/sanitizer.py
+++ b/bleach/sanitizer.py
@@ -63,16 +63,16 @@ class Cleaner(object):
strip_comments=True, filters=None):
"""Initializes a Cleaner
- :arg tags: whitelist of allowed tags; defaults to
+ :arg list tags: allowed list of tags; defaults to
``bleach.ALLOWED_TAGS``
- :arg attributes: whitelist of allowed attributes; defaults to
- ``bleach.ALLOWED_ATTRIBUTES``
+ :arg dict attributes: allowed attributes; can be a callable, list or dict;
+ defaults to ``bleach.ALLOWED_ATTRIBUTES``
- :arg styles: whitelist of allowed css; defaults to
+ :arg list styles: allowed list of css styles; defaults to
``bleach.ALLOWED_STYLES``
- :arg protocols: whitelist of allowed protocols for links; defaults
+ :arg list protocols: allowed list of protocols for links; defaults
to ``bleach.ALLOWED_PROTOCOLS``
:arg strip: whether or not to strip disallowed elements
@@ -196,7 +196,27 @@ class BleachSanitizerFilter(sanitizer.Filter):
def __init__(self, source, attributes=ALLOWED_ATTRIBUTES,
strip_disallowed_elements=False, strip_html_comments=True,
**kwargs):
+ """Creates a BleachSanitizerFilter instance
+ :arg Treewalker source: stream
+
+ :arg list tags: allowed list of tags; defaults to
+ ``bleach.ALLOWED_TAGS``
+
+ :arg dict attributes: allowed attributes; can be a callable, list or dict;
+ defaults to ``bleach.ALLOWED_ATTRIBUTES``
+
+ :arg list styles: allowed list of css styles; defaults to
+ ``bleach.ALLOWED_STYLES``
+
+ :arg list protocols: allowed list of protocols for links; defaults
+ to ``bleach.ALLOWED_PROTOCOLS``
+
+ :arg strip_disallowed_elements: whether or not to strip disallowed elements
+
+ :arg strip_html_comments: whether or not to strip HTML comments
+
+ """
self.attr_filter = attribute_filter_factory(attributes)
self.strip_disallowed_elements = strip_disallowed_elements
diff --git a/docs/goals.rst b/docs/goals.rst
index 632c222..015bc56 100644
--- a/docs/goals.rst
+++ b/docs/goals.rst
@@ -13,15 +13,15 @@ Goals
=====
-Always take a whitelist-based approach
---------------------------------------
+Always take a allowed-list-based approach
+-----------------------------------------
-Bleach should always take a whitelist-based approach to allowing any kind of
-content or markup. Blacklisting is error-prone and not future proof.
+Bleach should always take a allowed-list-based approach to markup filtering.
+Specifying disallowed lists is error-prone and not future proof.
For example, you should have to opt-in to allowing the ``onclick`` attribute,
-not blacklist all the other ``on*`` attributes. Future versions of HTML may add
-new event handlers, like ``ontouch``, that old blacklists would not prevent.
+not opt-out of all the other ``on*`` attributes. Future versions of HTML may add
+new event handlers, like ``ontouch``, that old disallow would not prevent.
Main goal is to sanitize input of malicious content
@@ -39,8 +39,8 @@ Examples might include:
These examples, and others, are traditionally prone to security issues like XSS
or other script injection, or annoying issues like unclosed tags and invalid
-markup. Bleach will take a proactive, whitelist-only approach to allowing HTML
-content, and will use the HTML5 parsing algorithm to handle invalid markup.
+markup. Bleach will take a proactive, allowed-list-only approach to allowing
+HTML content, and will use the HTML5 parsing algorithm to handle invalid markup.
See the :ref:`chapter on clean() <clean-chapter>` for more info.
@@ -52,7 +52,7 @@ The secondary goal of Bleach is to provide a mechanism for finding or altering
links (``<a>`` tags with ``href`` attributes, or things that look like URLs or
email addresses) in text.
-While Bleach itself will always operate on a whitelist-based security model,
+While Bleach itself will always operate on a allowed-list-based security model,
the :ref:`linkify() method <linkify-chapter>` is flexible enough to allow the
creation, alteration, and removal of links based on an extremely wide range of
use cases.
@@ -69,8 +69,8 @@ Sanitize complete HTML documents
--------------------------------
Once you're creating whole documents, you have to allow so many tags that a
-blacklist approach (e.g. forbidding ``<script>`` or ``<object>``) may be more
-appropriate.
+disallow-list approach (e.g. forbidding ``<script>`` or ``<object>``) may be
+more appropriate.
Remove all HTML or transforming content for some non-web-page purpose
diff --git a/docs/linkify.rst b/docs/linkify.rst
index a468830..71c2e7f 100644
--- a/docs/linkify.rst
+++ b/docs/linkify.rst
@@ -248,15 +248,13 @@ For example, this removes any ``mailto:`` links:
u'mail janet!'
-Skipping links in pre blocks (``skip_pre``)
-===========================================
+Skipping links in specified tag blocks (``skip_tags``)
+======================================================
``<pre>`` tags are often special, literal sections. If you don't want to create
-any new links within a ``<pre>`` section, pass ``skip_pre=True``.
+any new links within a ``<pre>`` section, pass ``skip_tags=['pre']``.
-.. note::
- Though new links will not be created, existing links created with ``<a>``
- tags will still be passed through all the callbacks.
+This works for ``code``, ``div`` and any other blocks you want to skip over.
Linkifying email addresses (``parse_email``)
@@ -281,7 +279,7 @@ instance.
>>> from bleach.linkifier import Linker
- >>> linker = Linker(skip_pre=True)
+ >>> linker = Linker(skip_tags=['pre'])
>>> linker.linkify('a b c http://example.com d e f')
u'a b c <a href="http://example.com" rel="nofollow">http://example.com</a> d e f'
@@ -328,7 +326,7 @@ And passing parameters to ``LinkifyFilter``:
>>> cleaner = Cleaner(
... tags=['pre'],
- ... filters=[partial(LinkifyFilter, skip_pre=True)]
+ ... filters=[partial(LinkifyFilter, skip_tags=['pre'])]
... )
...
>>> cleaner.clean('<pre>http://example.com</pre>')
diff --git a/setup.py b/setup.py
index 39fbb37..bf95efc 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@ if 'test' in sys.argv:
tests_require = [
'pytest>=3.0.0',
]
-
+
install_requires = [
'six',
# >= 8 9s because of breaking API change
@@ -42,7 +42,7 @@ def get_version():
setup(
name='bleach',
version=get_version(),
- description='An easy whitelist-based HTML-sanitizing tool.',
+ description='An easy safelist-based HTML-sanitizing tool.',
long_description=get_long_desc(),
maintainer='Will Kahn-Greene',
url='http://github.com/mozilla/bleach',
| linkify feature request - extend skip_pre
Hi,
Can `skip_pre` be extended to support a list of HTML tags?
IMHO, the `code` tag should be skipped as well.
https://github.com/mozilla/bleach/blob/master/bleach/__init__.py#L318
Thanks
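A minimal sketch of the requested behaviour using the `skip_tags` argument introduced by the patch above (the input string is illustrative; it assumes Bleach 2.0 with this change applied):

```python
import bleach

text = 'see http://example.com <pre>http://example.com</pre> <code>http://example.com</code>'

# Only the URL outside the listed tags gets turned into a link.
bleach.linkify(text, skip_tags=['pre', 'code'])
```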
| mozilla/bleach | diff --git a/tests/test_links.py b/tests/test_links.py
index 99b30b8..a3cb973 100644
--- a/tests/test_links.py
+++ b/tests/test_links.py
@@ -357,24 +357,24 @@ def test_unsafe_url():
)
-def test_skip_pre():
- """Skip linkification in <pre> tags."""
+def test_skip_tags():
+ """Skip linkification in skip tags"""
simple = 'http://xx.com <pre>http://xx.com</pre>'
linked = ('<a href="http://xx.com" rel="nofollow">http://xx.com</a> '
'<pre>http://xx.com</pre>')
all_linked = ('<a href="http://xx.com" rel="nofollow">http://xx.com</a> '
'<pre><a href="http://xx.com" rel="nofollow">http://xx.com'
'</a></pre>')
- assert linkify(simple, skip_pre=True) == linked
+ assert linkify(simple, skip_tags=['pre']) == linked
assert linkify(simple) == all_linked
already_linked = '<pre><a href="http://xx.com">xx</a></pre>'
nofollowed = '<pre><a href="http://xx.com" rel="nofollow">xx</a></pre>'
assert linkify(already_linked) == nofollowed
- assert linkify(already_linked, skip_pre=True) == nofollowed
+ assert linkify(already_linked, skip_tags=['pre']) == nofollowed
assert (
- linkify('<pre><code>http://example.com</code></pre>http://example.com', skip_pre=True) ==
+ linkify('<pre><code>http://example.com</code></pre>http://example.com', skip_tags=['pre']) ==
(
'<pre><code>http://example.com</code></pre>'
'<a href="http://example.com" rel="nofollow">http://example.com</a>'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 8
} | 1.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest -v"
} | alabaster==0.7.13
args==0.1.0
Babel==2.11.0
-e git+https://github.com/mozilla/bleach.git@fa4cadc5b3dad924b6c1e82a284cb2bb7a94377f#egg=bleach
certifi==2021.5.30
charset-normalizer==2.0.12
clint==0.5.1
distlib==0.3.9
docutils==0.18.1
filelock==3.4.1
flake8==3.3.0
html5lib==1.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.6.1
pkginfo==1.10.0
platformdirs==2.4.0
pluggy==0.4.0
py==1.11.0
pycodestyle==2.3.1
pyflakes==1.5.0
Pygments==2.14.0
pytest==3.0.6
pytest-wholenodeid==0.2
pytz==2025.2
requests==2.27.1
requests-toolbelt==1.0.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==1.5.2
tox==2.4.1
twine==1.8.1
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.17.1
webencodings==0.5.1
zipp==3.6.0
| name: bleach
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- args==0.1.0
- babel==2.11.0
- charset-normalizer==2.0.12
- clint==0.5.1
- distlib==0.3.9
- docutils==0.18.1
- filelock==3.4.1
- flake8==3.3.0
- html5lib==1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.6.1
- pkginfo==1.10.0
- platformdirs==2.4.0
- pluggy==0.4.0
- py==1.11.0
- pycodestyle==2.3.1
- pyflakes==1.5.0
- pygments==2.14.0
- pytest==3.0.6
- pytest-wholenodeid==0.2
- pytz==2025.2
- requests==2.27.1
- requests-toolbelt==1.0.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==1.5.2
- tox==2.4.1
- twine==1.8.1
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.17.1
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/bleach
| [
"tests/test_links.py::test_skip_tags"
]
| []
| [
"tests/test_links.py::test_empty",
"tests/test_links.py::test_simple_link",
"tests/test_links.py::test_trailing_slash",
"tests/test_links.py::test_mangle_link",
"tests/test_links.py::test_mangle_text",
"tests/test_links.py::test_email_link[\"\\\\\\n\"@opa.ru-True-\"\\\\\\n\"@opa.ru]",
"tests/test_links.py::test_set_attrs",
"tests/test_links.py::test_only_proto_links",
"tests/test_links.py::test_stop_email",
"tests/test_links.py::test_tlds[example.yyy-example.yyy]",
"tests/test_links.py::test_tlds[brie-brie]",
"tests/test_links.py::test_escaping",
"tests/test_links.py::test_nofollow_off",
"tests/test_links.py::test_link_in_html",
"tests/test_links.py::test_links_https",
"tests/test_links.py::test_add_rel_nofollow",
"tests/test_links.py::test_url_with_path",
"tests/test_links.py::test_link_ftp",
"tests/test_links.py::test_link_query",
"tests/test_links.py::test_link_fragment",
"tests/test_links.py::test_link_entities",
"tests/test_links.py::test_escaped_html",
"tests/test_links.py::test_link_http_complete",
"tests/test_links.py::test_non_url",
"tests/test_links.py::test_javascript_url",
"tests/test_links.py::test_unsafe_url",
"tests/test_links.py::test_libgl",
"tests/test_links.py::test_end_of_sentence[example.com-.]",
"tests/test_links.py::test_end_of_sentence[example.com-...]",
"tests/test_links.py::test_end_of_sentence[ex.com/foo-.]",
"tests/test_links.py::test_end_of_sentence[ex.com/foo-....]",
"tests/test_links.py::test_end_of_clause",
"tests/test_links.py::test_wrapping_parentheses[(example.com)-expected_data0]",
"tests/test_links.py::test_wrapping_parentheses[(example.com/)-expected_data1]",
"tests/test_links.py::test_wrapping_parentheses[(example.com/foo)-expected_data2]",
"tests/test_links.py::test_wrapping_parentheses[(((example.com/))))-expected_data3]",
"tests/test_links.py::test_wrapping_parentheses[example.com/))-expected_data4]",
"tests/test_links.py::test_wrapping_parentheses[http://en.wikipedia.org/wiki/Test_(assessment)-expected_data7]",
"tests/test_links.py::test_wrapping_parentheses[(http://en.wikipedia.org/wiki/Test_(assessment))-expected_data8]",
"tests/test_links.py::test_wrapping_parentheses[((http://en.wikipedia.org/wiki/Test_(assessment))-expected_data9]",
"tests/test_links.py::test_wrapping_parentheses[(http://en.wikipedia.org/wiki/Test_(assessment)))-expected_data10]",
"tests/test_links.py::test_wrapping_parentheses[(http://en.wikipedia.org/wiki/)Test_(assessment-expected_data11]",
"tests/test_links.py::test_parentheses_with_removing",
"tests/test_links.py::test_ports[http://foo.com:8000-expected_data0]",
"tests/test_links.py::test_ports[http://foo.com:8000/-expected_data1]",
"tests/test_links.py::test_ports[http://bar.com:xkcd-expected_data2]",
"tests/test_links.py::test_ports[http://foo.com:81/bar-expected_data3]",
"tests/test_links.py::test_ports[http://foo.com:-expected_data4]",
"tests/test_links.py::test_ports[http://foo.com:\\u0663\\u0669/-expected_data5]",
"tests/test_links.py::test_ports[http://foo.com:\\U0001d7e0\\U0001d7d8/-expected_data6]",
"tests/test_links.py::test_ignore_bad_protocols",
"tests/test_links.py::test_link_emails_and_urls",
"tests/test_links.py::test_links_case_insensitive",
"tests/test_links.py::test_elements_inside_links",
"tests/test_links.py::test_drop_link_tags",
"tests/test_links.py::test_naughty_unescaping[<br>-<br>]",
"tests/test_links.py::test_hang",
"tests/test_links.py::test_url_re_arg",
"tests/test_links.py::test_email_re_arg",
"tests/test_links.py::test_linkify_idempotent",
"tests/test_links.py::TestLinkify::test_no_href_links",
"tests/test_links.py::TestLinkify::test_rel_already_there"
]
| []
| Apache License 2.0 | 1,065 | [
"README.rst",
"docs/linkify.rst",
"setup.py",
"bleach/__init__.py",
"bleach/sanitizer.py",
"CHANGES",
"bleach/linkifier.py",
"docs/goals.rst"
]
| [
"README.rst",
"docs/linkify.rst",
"setup.py",
"bleach/__init__.py",
"bleach/sanitizer.py",
"CHANGES",
"bleach/linkifier.py",
"docs/goals.rst"
]
|
|
abantos__bolt-88 | 39a5db9fa29ec6c13dceb51480fb3e56574aa2a7 | 2017-03-07 16:19:15 | f5c7c3ca52c5a80bc05e890f956bc0fe194ff007 | diff --git a/bolt/about.py b/bolt/about.py
index 3c285ba..9b6ae95 100644
--- a/bolt/about.py
+++ b/bolt/about.py
@@ -8,4 +8,4 @@ A task runner written in Python
copyright = u'2016 Abantos'
author = u'Isaac Rodriguez'
version = u'0.2'
-release = u'0.2.2'
+release = u'0.2.3'
diff --git a/bolt/tasks/bolt_setup.py b/bolt/tasks/bolt_setup.py
index 6190eb5..e10aa8b 100644
--- a/bolt/tasks/bolt_setup.py
+++ b/bolt/tasks/bolt_setup.py
@@ -26,8 +26,12 @@ configure the task. ::
import distutils.core as dcore
import logging
+import bolt.errors as errors
import bolt.utils as utilities
+
+class BuildSetupError(errors.TaskError): pass
+
DEFAULT_ARGUMENTS = ['build']
DEFAULT_SETUP_SCRIPT = 'setup.py'
@@ -50,11 +54,13 @@ class ExecuteSetupTask(object):
self.setup_script = DEFAULT_SETUP_SCRIPT
generator = _SetupArgumentGenerator()
self.args = generator.generate_from(config)
- self._execute_setup()
+ result = self._execute_setup()
+ if not result.dist_files:
+ raise BuildSetupError()
def _execute_setup(self):
- dcore.run_setup(self.setup_script, self.args)
+ return dcore.run_setup(self.setup_script, self.args)
| Setup Task Swallows Errors Building Distribution
### Description
The current implementation of the Setup task does not evaluate the return object from `distutils.core.run_setup()`; it just assumes the call worked. Unfortunately, some environments will not have the right tools to build a distribution, so the task silently fails.
I found this problem in another project's pipeline, where building a `wheel` inside the official Python 2.7 Docker container silently failed because the `wheel` package is not installed by default there, unlike in a freshly created virtual environment.
### Steps to Reproduce
- Create a setup task that builds a simple distribution wheel.
- Create a virtual environment, activate it, and uninstall the `wheel` package.
- Run the setup task within the environment.
- Check the dist folder, which will be empty.
The current behavior shows that the return value from `distutils.core.run_setup()` invoked by the task is not evaluated, and the task appears to succeed even though the `.whl` file has not been created.
An exception is expected to be raised if the wheel build doesn't succeed.
### Fix Analysis
Looking at the implementation of `distutils.core.run_setup()`, you can see the function returns a `Distribution` instance. The instance exposes a `dist_files` property, which is empty if the distribution failed to build. If it succeeds, the property holds a list of tuples containing information about each built distribution. This allows us to fix the problem, or at least report an error if no distribution was built, by checking the contents of `dist_files`; something like the following code:
```python
# dcore is an alias to distutils.core.
dist_info = dcore.run_setup(self.setup_script, self.args)
if not dist_info.dist_files:
raise DistributionBuildError()
```
> NOTE: I need to think about how to handle multiple distributions, although I believe the task currently only supports one distribution per configuration.
The following is an example of the contents of the `dist_files` property:
```
[('bdist_wheel', '3.5', 'D:\\Projects\\Abantos\\bolt\\dist\\bolt_ta-0.2.1-py2.py3-none-any.whl')]
```
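Putting the analysis together, a standalone sketch of the guarded call (the script arguments and the exception name are illustrative; the shipped fix may hook into Bolt's own error hierarchy instead):

```python
import distutils.core as dcore


class DistributionBuildError(Exception):
    """Raised when the setup script produced no distribution files."""


dist = dcore.run_setup('setup.py', ['bdist_wheel'])
if not dist.dist_files:
    raise DistributionBuildError('no distribution built; is the `wheel` package installed?')
```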
### Acceptance Criteria
Raises exception if building the distribution fails. | abantos/bolt | diff --git a/test/test_tasks/test_bolt_setup.py b/test/test_tasks/test_bolt_setup.py
index d9a03b9..9e874cc 100644
--- a/test/test_tasks/test_bolt_setup.py
+++ b/test/test_tasks/test_bolt_setup.py
@@ -7,7 +7,7 @@ import _mocks as mck
class TestExecuteSetupTask(unittest.TestCase):
def setUp(self):
- self. subject = ExecuteSetupTaskSpy()
+ self.subject = ExecuteSetupTaskSpy()
return super(TestExecuteSetupTask, self).setUp()
@@ -25,6 +25,12 @@ class TestExecuteSetupTask(unittest.TestCase):
self.assertEqual(self.subject.setup_script, script)
+ def test_raises_exception_if_building_setup_fails(self):
+ self.subject.dist_files = []
+ with self.assertRaises(bsetup.BuildSetupError):
+ self.given({})
+
+
def given(self, config):
self.subject(config=config)
@@ -36,9 +42,13 @@ class TestExecuteSetupTask(unittest.TestCase):
class ExecuteSetupTaskSpy(bsetup.ExecuteSetupTask):
+
+ def __init__(self):
+ super(ExecuteSetupTaskSpy, self).__init__()
+ self.dist_files = [('bdist_wheel', '3.5', '/some/colation/the.whl')]
def _execute_setup(self):
- pass
+ return self
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
-e git+https://github.com/abantos/bolt.git@39a5db9fa29ec6c13dceb51480fb3e56574aa2a7#egg=bolt_ta
certifi==2021.5.30
charset-normalizer==2.0.12
conttest==0.0.8
coverage==6.2
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytz==2025.2
requests==2.27.1
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: bolt
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- conttest==0.0.8
- coverage==6.2
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytz==2025.2
- requests==2.27.1
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/bolt
| [
"test/test_tasks/test_bolt_setup.py::TestExecuteSetupTask::test_raises_exception_if_building_setup_fails"
]
| []
| [
"test/test_tasks/test_bolt_setup.py::TestExecuteSetupTask::test_uses_default_if_empty_configuration",
"test/test_tasks/test_bolt_setup.py::TestExecuteSetupTask::test_uses_specified_script",
"test/test_tasks/test_bolt_setup.py::TestRegisterTasks::test_registers_setup"
]
| []
| MIT License | 1,066 | [
"bolt/tasks/bolt_setup.py",
"bolt/about.py"
]
| [
"bolt/tasks/bolt_setup.py",
"bolt/about.py"
]
|
|
pberkes__big_O-10 | 042b7d7225ea61d54f43408740278586bea8a01e | 2017-03-07 21:26:20 | 042b7d7225ea61d54f43408740278586bea8a01e | diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..4ea1396
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,8 @@
+language: python
+python:
+ - "2.7"
+ - "3.4"
+ - "3.5"
+ - "3.6"
+install: "pip install -r requirements.txt"
+script: "python -m unittest discover -v"
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100755
index f218bf2..0000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-include *.rst
-include *.txt
diff --git a/README.rst b/README.rst
index c1c8674..991b14d 100755
--- a/README.rst
+++ b/README.rst
@@ -99,12 +99,12 @@ Creating an array:
- `numpy.zeros` is O(n), since it needs to initialize every element to 0:
>>> import numpy as np
- >>> big_o.big_o(np.zeros, big_o.datagen.n_, max_n=1000000, n_repeats=5) # doctest: +ELLIPSIS
+ >>> big_o.big_o(np.zeros, big_o.datagen.n_, max_n=100000, n_repeats=100)
(<class 'big_o.big_o.Linear'>, ...)
- `numpy.empty` instead just allocates the memory, and is thus O(1):
- >>> big_o.big_o(np.empty, big_o.datagen.n_, max_n=1000000, n_repeats=5) # doctest: +ELLIPSIS
+ >>> big_o.big_o(np.empty, big_o.datagen.n_, max_n=100000, n_repeats=100)
(<class 'big_o.big_o.Constant'> ...)
diff --git a/big_o/big_o.py b/big_o/big_o.py
index d8652ba..1dbeae4 100755
--- a/big_o/big_o.py
+++ b/big_o/big_o.py
@@ -1,14 +1,15 @@
from __future__ import absolute_import
-import numpy as np
from timeit import Timer
+import numpy as np
+
from big_o.complexities import ALL_CLASSES
def measure_execution_time(func, data_generator,
min_n=100, max_n=100000, n_measures=10,
- n_repeats=1):
+ n_repeats=1, n_timings=1):
""" Measure the execution time of a function for increasing N.
Input:
@@ -30,6 +31,9 @@ def measure_execution_time(func, data_generator,
n_repeats -- Number of times func is called to compute execution time
(return the cumulative time of execution)
+ n_timings -- Number of times the timing measurement is repeated.
+ The minimum time for all the measurements is kept.
+
Output:
-------
@@ -50,11 +54,12 @@ def measure_execution_time(func, data_generator,
# TODO: check that max_n is not larger than max int64
ns = np.linspace(min_n, max_n, n_measures).astype('int64')
- time = np.empty(n_measures)
+ execution_time = np.empty(n_measures)
for i, n in enumerate(ns):
timer = Timer(func_wrapper(n))
- time[i] = timer.timeit(n_repeats)
- return ns, time
+ measurements = timer.repeat(n_timings, n_repeats)
+ execution_time[i] = np.min(measurements)
+ return ns, execution_time
def infer_big_o_class(ns, time, classes=ALL_CLASSES, verbose=False):
@@ -104,7 +109,7 @@ def infer_big_o_class(ns, time, classes=ALL_CLASSES, verbose=False):
def big_o(func, data_generator,
min_n=100, max_n=100000, n_measures=10,
- n_repeats=1, classes=ALL_CLASSES, verbose=False):
+ n_repeats=1, n_timings=1, classes=ALL_CLASSES, verbose=False):
""" Estimate time complexity class of a function from execution time.
Input:
@@ -126,6 +131,9 @@ def big_o(func, data_generator,
n_repeats -- Number of times func is called to compute execution time
(return the cumulative time of execution)
+ n_timings -- Number of times the timing measurement is repeated.
+ The minimum time for all the measurements is kept.
+
classes -- The complexity classes to consider. This is a list of subclasses
of `big_o.complexities.ComplexityClass`.
Default: all the classes in `big_o.complexities.ALL_CLASSES`
@@ -144,5 +152,6 @@ def big_o(func, data_generator,
"""
ns, time = measure_execution_time(func, data_generator,
- min_n, max_n, n_measures, n_repeats)
+ min_n, max_n, n_measures, n_repeats,
+ n_timings)
return infer_big_o_class(ns, time, classes, verbose=verbose)
diff --git a/big_o/datagen.py b/big_o/datagen.py
index ee46786..b26403b 100755
--- a/big_o/datagen.py
+++ b/big_o/datagen.py
@@ -26,7 +26,7 @@ def large_integers(n):
for _ in range(n)]
-def strings(n, chars=string.letters):
+def strings(n, chars=string.ascii_letters):
""" Return random string of N characters, sampled at random from `chars`.
"""
return ''.join([random.choice(chars) for i in xrange(n)])
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..24ce15a
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,1 @@
+numpy
| TravisCI pull request testing | pberkes/big_O | diff --git a/big_o/test/test_big_o.py b/big_o/test/test_big_o.py
index bb46ceb..4147402 100755
--- a/big_o/test/test_big_o.py
+++ b/big_o/test/test_big_o.py
@@ -42,27 +42,39 @@ class TestBigO(unittest.TestCase):
assert_array_almost_equal(coeff, res_class.coeff, 2)
def test_big_o(self):
+ def dummy_linear_function(n):
+ for i in range(n):
+ # Dummy operation with constant complexity.
+ 8282828 * 2322
+
+ def dummy_quadratic_function(n):
+ for i in range(n):
+ for j in range(n):
+ # Dummy operation with constant complexity.
+ 8282828 * 2322
+
+ # In the best case, TimSort is linear, so we fix a random array to
+ # make sure we hit a close-to-worst case scenario, which is
+ # O(n*log(n)).
+ random_state = np.random.RandomState(89342787)
+ random_array = random_state.rand(100000)
+
+ # Each test case is a tuple
+ # (function_to_evaluate, expected_complexity_class, range_for_n)
desired = [
- (lambda n: [i for i in xrange(n*100)], compl.Linear),
- (lambda n: 1., compl.Constant),
- (lambda n: [i+j for i in xrange(n) for j in xrange(n)],
- compl.Quadratic),
- (lambda n: sorted(np.random.randn(n*100)), compl.Linearithmic),
+ (dummy_linear_function, compl.Linear, (100, 10000)),
+ (lambda n: 1., compl.Constant, (1000, 10000)),
+ (dummy_quadratic_function, compl.Quadratic, (50, 200)),
+ (lambda n: np.sort(random_array[:n], kind='mergesort'),
+ compl.Linearithmic, (100, random_array.shape[0])),
]
- for func, class_ in desired:
+ for func, class_, n_range in desired:
res_class, fitted = big_o.big_o(
func, datagen.n_,
- min_n=100, max_n=1000, n_repeats=5,
+ min_n=n_range[0],
+ max_n=n_range[1],
+ n_measures=25,
+ n_repeats=10,
+ n_timings=10,
)
self.assertEqual(class_, res_class.__class__)
-
- def test_compute(self):
- x = np.linspace(10, 100, 100)
- y = 3.0 * x + 2.0
- linear = compl.Linear()
- linear.fit(x, y)
- assert_array_almost_equal(linear.compute(x), y, 10)
-
- def test_not_fitted(self):
- linear = compl.Linear()
- self.assertRaises(compl.NotFittedError, linear.compute, 100)
diff --git a/big_o/test/test_complexities.py b/big_o/test/test_complexities.py
new file mode 100644
index 0000000..3935c88
--- /dev/null
+++ b/big_o/test/test_complexities.py
@@ -0,0 +1,19 @@
+import unittest
+import numpy as np
+from numpy.testing import assert_array_almost_equal
+
+from big_o import complexities
+
+
+class TestComplexities(unittest.TestCase):
+
+ def test_compute(self):
+ x = np.linspace(10, 100, 100)
+ y = 3.0 * x + 2.0
+ linear = complexities.Linear()
+ linear.fit(x, y)
+ assert_array_almost_equal(linear.compute(x), y, 10)
+
+ def test_not_fitted(self):
+ linear = complexities.Linear()
+ self.assertRaises(complexities.NotFittedError, linear.compute, 100)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"numpy>=1.16.0",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/pberkes/big_O.git@042b7d7225ea61d54f43408740278586bea8a01e#egg=big_O
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
numpy==2.0.2
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: big_O
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- numpy==2.0.2
prefix: /opt/conda/envs/big_O
| [
"big_o/test/test_big_o.py::TestBigO::test_infer_big_o",
"big_o/test/test_big_o.py::TestBigO::test_measure_execution_time",
"big_o/test/test_complexities.py::TestComplexities::test_compute",
"big_o/test/test_complexities.py::TestComplexities::test_not_fitted"
]
| [
"big_o/test/test_big_o.py::TestBigO::test_big_o"
]
| []
| []
| BSD 3-Clause "New" or "Revised" License | 1,067 | [
"README.rst",
"big_o/datagen.py",
"MANIFEST.in",
"big_o/big_o.py",
".travis.yml",
"requirements.txt"
]
| [
"README.rst",
"big_o/datagen.py",
"MANIFEST.in",
"big_o/big_o.py",
".travis.yml",
"requirements.txt"
]
|
|
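A standalone sketch of the timing idea in the big_O record above (time with `Timer.repeat` and keep the minimum measurement); the function name and defaults here are illustrative, not part of the big_O API:

```python
import timeit

def min_execution_time(func, n, n_timings=10, n_repeats=10):
    """Time func(n); repeat the whole measurement and keep the minimum.

    The minimum over several timings is less sensitive to background load
    than any single run, which is why the patch stores np.min(measurements)
    instead of one timeit() result; plain min() is equivalent here.
    """
    timer = timeit.Timer(lambda: func(n))
    # repeat() returns n_timings cumulative timings of n_repeats calls each.
    measurements = timer.repeat(repeat=n_timings, number=n_repeats)
    return min(measurements)

if __name__ == "__main__":
    print(min_execution_time(lambda n: sorted(range(n, 0, -1)), 10000))
```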
rm-hull__luma.core-43 | 78675e077c6e560afafb3811e4a31bc589857e34 | 2017-03-07 21:44:38 | 27b928405c7e8bfaa8d43af5b0e4d5fbaf9bdb05 | thijstriemstra: Want to add tests before merging it. | diff --git a/luma/core/lib.py b/luma/core/lib.py
new file mode 100644
index 0000000..21a12cb
--- /dev/null
+++ b/luma/core/lib.py
@@ -0,0 +1,42 @@
+from functools import wraps
+
+import luma.core.error
+
+
+__all__ = ["rpi_gpio", "spidev"]
+
+
+def __spidev__(self):
+ # spidev cant compile on macOS, so use a similar technique to
+ # initialize (mainly so the tests run unhindered)
+ import spidev
+ return spidev.SpiDev()
+
+
+def __rpi_gpio__(self):
+ # RPi.GPIO _really_ doesn't like being run on anything other than
+ # a Raspberry Pi... this is imported here so we can swap out the
+ # implementation for a mock
+ try:
+ import RPi.GPIO
+ return RPi.GPIO
+ except RuntimeError as e:
+ if str(e) == 'This module can only be run on a Raspberry Pi!':
+ raise luma.core.error.UnsupportedPlatform(
+ 'GPIO access not available')
+
+
+def rpi_gpio(f):
+ @wraps(f)
+ def wrapper(*args, **kwds):
+ f.__rpi_gpio__ = classmethod(__rpi_gpio__)
+ return f(*args, **kwds)
+ return wrapper
+
+
+def spidev(f):
+ @wraps(f)
+ def wrapper(*args, **kwds):
+ f.__spidev__ = classmethod(__spidev__)
+ return f(*args, **kwds)
+ return wrapper
diff --git a/luma/core/serial.py b/luma/core/serial.py
index e3b083b..da36935 100644
--- a/luma/core/serial.py
+++ b/luma/core/serial.py
@@ -11,6 +11,8 @@ import errno
import luma.core.error
+from luma.core import lib
+
__all__ = ["i2c", "spi"]
@@ -21,7 +23,7 @@ class i2c(object):
Circuit) interface to provide :py:func:`data` and :py:func:`command` methods.
:param bus: a *smbus* implementation, if `None` is supplied (default),
- `smbus2 <https://https://pypi.python.org/pypi/smbus2/0.1.4>`_ is used.
+ `smbus2 <https://pypi.python.org/pypi/smbus2>`_ is used.
Typically this is overridden in tests, or if there is a specific
reason why `pysmbus <https://pypi.python.org/pypi/pysmbus>`_ must be used
over smbus2
@@ -59,7 +61,7 @@ class i2c(object):
# FileNotFoundError
raise luma.core.error.DeviceNotFoundError(
'I2C device not found: {}'.format(e.filename))
- elif e.errno == errno.EPERM or e.errno == errno.EACCES:
+ elif e.errno in [errno.EPERM, errno.EACCES]:
# PermissionError
raise luma.core.error.DevicePermissionError(
'I2C device permission denied: {}'.format(e.filename))
@@ -73,9 +75,20 @@ class i2c(object):
:param cmd: a spread of commands
:type cmd: int
+ :raises luma.core.error.DeviceNotFoundError: I2C device could not be found.
"""
assert(len(cmd) <= 32)
- self._bus.write_i2c_block_data(self._addr, self._cmd_mode, list(cmd))
+
+ try:
+ self._bus.write_i2c_block_data(self._addr, self._cmd_mode,
+ list(cmd))
+ except OSError as e:
+ if e.errno in [errno.EREMOTEIO, errno.EIO]:
+ # I/O error
+ raise luma.core.error.DeviceNotFoundError(
+ 'I2C device not found on address: {}'.format(self._addr))
+ else: # pragma: no cover
+ raise
def data(self, data):
"""
@@ -100,6 +113,8 @@ class i2c(object):
self._bus.close()
[email protected]
[email protected]_gpio
class spi(object):
"""
Wraps an `SPI <https://en.wikipedia.org/wiki/Serial_Peripheral_Interface_Bus>`_
@@ -155,24 +170,6 @@ class spi(object):
self._gpio.output(self._bcm_RST, self._gpio.LOW) # Reset device
self._gpio.output(self._bcm_RST, self._gpio.HIGH) # Keep RESET pulled high
- def __rpi_gpio__(self):
- # RPi.GPIO _really_ doesn't like being run on anything other than
- # a Raspberry Pi... this is imported here so we can swap out the
- # implementation for a mock
- try:
- import RPi.GPIO
- return RPi.GPIO
- except RuntimeError as e:
- if str(e) == 'This module can only be run on a Raspberry Pi!':
- raise luma.core.error.UnsupportedPlatform(
- 'GPIO access not available')
-
- def __spidev__(self):
- # spidev cant compile on macOS, so use a similar technique to
- # initialize (mainly so the tests run unhindered)
- import spidev
- return spidev.SpiDev()
-
def command(self, *cmd):
"""
Sends a command or sequence of commands through to the SPI device.
| OSError: [Errno 5] Input/output error
I was getting this traceback when there wasn't any I2C device connected to the RPi:
```
$ python examples/3d_box.py -i i2c
...
3d_box.py: error: [Errno 5] Input/output error
Error in atexit._run_exitfuncs:
Traceback (most recent call last):
File "/home/pi/projects/pi-test/luma.core/luma/core/device.py", line 70, in cleanup
self.hide()
File "/home/pi/projects/pi-test/luma.core/luma/core/device.py", line 53, in hide
self.command(self._const.DISPLAYOFF)
File "/home/pi/projects/pi-test/luma.core/luma/core/device.py", line 32, in command
self._serial_interface.command(*cmd)
File "/home/pi/projects/pi-test/luma.core/luma/core/serial.py", line 63, in command
self._bus.write_i2c_block_data(self._addr, self._cmd_mode, list(cmd))
File "/home/pi/.virtualenvs/pi-test/lib/python3.4/site-packages/smbus2/smbus2.py", line 274, in write_i2c_block_data
ioctl(self.fd, I2C_SMBUS, msg)
OSError: [Errno 5] Input/output error
```
I thought this `OSError` business was already fixed before; it should raise a `DeviceNotFoundError` instead. | rm-hull/luma.core | diff --git a/tests/test_serial.py b/tests/test_serial.py
index 5c22381..85ed96f 100644
--- a/tests/test_serial.py
+++ b/tests/test_serial.py
@@ -7,6 +7,8 @@
Tests for the :py:mod:`luma.core.serial` module.
"""
+import errno
+
try:
from unittest.mock import patch, call, Mock
except ImportError:
@@ -82,6 +84,25 @@ def test_i2c_command():
smbus.write_i2c_block_data.assert_called_once_with(0x83, 0x00, cmds)
+def test_i2c_command_device_not_found_error():
+ errorbus = Mock(unsafe=True)
+ address = 0x71
+ cmds = [3, 1, 4, 2]
+ expected_error = OSError()
+
+ with patch.object(errorbus, 'write_i2c_block_data') as broken_command:
+ for error_code in [errno.EREMOTEIO, errno.EIO]:
+ expected_error.errno = error_code
+ broken_command.side_effect = expected_error
+
+ serial = i2c(bus=errorbus, address=address)
+ with pytest.raises(luma.core.error.DeviceNotFoundError) as ex:
+ serial.command(*cmds)
+
+ assert str(ex.value) == 'I2C device not found on address: {}'.format(
+ address)
+
+
def test_i2c_data():
data = list(fib(10))
serial = i2c(bus=smbus, address=0x21)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 1
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
flake8==7.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/rm-hull/luma.core.git@78675e077c6e560afafb3811e4a31bc589857e34#egg=luma.core
mccabe==0.7.0
mock==5.2.0
packaging @ file:///croot/packaging_1734472117206/work
pillow==11.1.0
pluggy @ file:///croot/pluggy_1733169602837/work
pycodestyle==2.13.0
pyflakes==3.3.1
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
RPi.GPIO==0.7.1
smbus2==0.5.0
spidev==3.6
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: luma.core
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- flake8==7.2.0
- mccabe==0.7.0
- mock==5.2.0
- pillow==11.1.0
- pycodestyle==2.13.0
- pyflakes==3.3.1
- pytest-cov==6.0.0
- rpi-gpio==0.7.1
- smbus2==0.5.0
- spidev==3.6
prefix: /opt/conda/envs/luma.core
| [
"tests/test_serial.py::test_i2c_command_device_not_found_error"
]
| [
"tests/test_serial.py::test_i2c_init_device_permission_error"
]
| [
"tests/test_serial.py::test_i2c_init_device_not_found",
"tests/test_serial.py::test_i2c_init_device_address_error",
"tests/test_serial.py::test_i2c_init_no_bus",
"tests/test_serial.py::test_i2c_init_bus_provided",
"tests/test_serial.py::test_i2c_command",
"tests/test_serial.py::test_i2c_data",
"tests/test_serial.py::test_i2c_data_chunked",
"tests/test_serial.py::test_i2c_cleanup",
"tests/test_serial.py::test_spi_init",
"tests/test_serial.py::test_spi_init_invalid_bus_speed",
"tests/test_serial.py::test_spi_command",
"tests/test_serial.py::test_spi_data",
"tests/test_serial.py::test_spi_cleanup",
"tests/test_serial.py::test_spi_init_device_not_found",
"tests/test_serial.py::test_spi_unsupported_gpio_platform"
]
| []
| MIT License | 1,068 | [
"luma/core/lib.py",
"luma/core/serial.py"
]
| [
"luma/core/lib.py",
"luma/core/serial.py"
]
|
zopefoundation__ZConfig-26 | 227f96029db86137ddb02f18eedce5b4fcd6d8ba | 2017-03-08 14:14:18 | 648fc674631ab28c7268aba114804020f8d7ebec | diff --git a/README.rst b/README.rst
index 627f4f8..e772c10 100644
--- a/README.rst
+++ b/README.rst
@@ -23,6 +23,23 @@ the file LICENSE.txt in the distribution for the full license text.
Reference documentation is available at https://zconfig.readthedocs.io.
+Information on the latest released version of the ZConfig package is
+available at
+
+ https://pypi.python.org/pypi/ZConfig/
+
+You may either create an RPM and install this, or install directly from
+the source distribution.
+
+There is a mailing list for discussions and questions about ZConfig;
+more information on the list is available at
+
+ http://mail.zope.org/mailman/listinfo/zconfig/
+
+
+Configuring Logging
+-------------------
+
One common use of ZConfig is to configure the Python logging
framework. This is extremely simple to do as the following example
demonstrates:
@@ -50,21 +67,7 @@ or above to the console, as we can see in the following example:
A more common configuration would see STDOUT replaced with a path to
the file into which log entries would be written.
-For more information, see section 5.2 on the ZConfig documentation and
-the examples in ZConfig/components/logger/tests.
-
-Information on the latest released version of the ZConfig package is
-available at
-
- https://pypi.python.org/pypi/ZConfig/
-
-You may either create an RPM and install this, or install directly from
-the source distribution.
-
-There is a mailing list for discussions and questions about ZConfig;
-more information on the list is available at
-
- http://mail.zope.org/mailman/listinfo/zconfig/
+For more information, see the `the documentation <https://zconfig.readthedocs.io>`_.
Installing from the source distribution
diff --git a/doc/developing-with-zconfig.rst b/doc/developing-with-zconfig.rst
index 20f90ba..4ef663a 100644
--- a/doc/developing-with-zconfig.rst
+++ b/doc/developing-with-zconfig.rst
@@ -9,5 +9,6 @@
writing-schema
standard-datatypes
standard-components
+ logging-components
writing-components
documenting-components
diff --git a/doc/index.rst b/doc/index.rst
index 19d2cbb..fa1173b 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -18,7 +18,8 @@ ZODB, and is easily used by other projects. ZConfig only relies on the
Python standard library.
For information on reading and writing configuration documents, see
-:doc:`using-zconfig`.
+:doc:`using-zconfig`. For the extremely common usage of configuring
+the Python :mod:`logging` framework, see :doc:`using-logging`.
For information on using ZConfig to create custom configurations for
you projects, see :doc:`developing-with-zconfig`.
@@ -30,6 +31,7 @@ Development of ZConfig is hosted on `GitHub <https://github.com/zopefoundation/Z
:caption: Contents:
using-zconfig
+ using-logging
developing-with-zconfig
zconfig
tools
diff --git a/doc/logging-components.rst b/doc/logging-components.rst
new file mode 100644
index 0000000..c0560f4
--- /dev/null
+++ b/doc/logging-components.rst
@@ -0,0 +1,164 @@
+====================
+ Logging Components
+====================
+
+.. highlight:: xml
+
+
+The :mod:`ZConfig.components.logger` package provides configuration
+support for the :mod:`logging` package in Python's standard library.
+This component can be imported using::
+
+
+ <import package="ZConfig.components.logger"/>
+
+
+This component defines two abstract types and several concrete section
+types. These can be imported as a unit, as above, or as four smaller
+components usable in creating alternate logging packages.
+
+The first of the four smaller components contains the abstract types,
+and can be imported using::
+
+
+ <import package="ZConfig.components.logger" file="abstract.xml"/>
+
+
+The two abstract types imported by this are:
+
+
+**ZConfig.logger.log**
+ Logger objects are represented by this abstract type.
+
+**ZConfig.logger.handler**
+ Each logger object can have one or more "handlers" associated with
+ them. These handlers are responsible for writing logging events to
+ some form of output stream using appropriate formatting. The output
+ stream may be a file on a disk, a socket communicating with a server
+ on another system, or a series of ``syslog`` messages. Section
+ types which implement this type represent these handlers.
+
+
+The second and third of the smaller components provides section types
+that act as factories for :class:`logging.Logger` objects. These can be
+imported using::
+
+
+ <import package="ZConfig.components.logger" file="eventlog.xml"/>
+ <import package="ZConfig.components.logger" file="logger.xml"/>
+
+The types defined in these components implement the
+**ZConfig.logger.log** abstract type. The 'eventlog.xml'
+component defines an **eventlog** type which represents the
+root logger from the the :mod:`logging` package (the return value of
+:func:`logging.getLogger`), while the 'logger.xml' component
+defines a **logger** section type which represents a named
+logger.
+
+
+The third of the smaller components provides section types that are
+factories for :class:`logging.Handler` objects. This can be imported
+using::
+
+
+ <import package="ZConfig.components.logger" file="handlers.xml"/>
+
+
+The types defined in this component implement the
+**ZConfig.logger.handler** abstract type.
+
+
+The configuration objects provided by both the logger and handler
+types are factories for the finished loggers and handlers. These
+factories should be called with no arguments to retrieve the logger or
+log handler objects. Calling the factories repeatedly will cause the
+same objects to be returned each time, so it's safe to simply call
+them to retrieve the objects.
+
+The factories for the logger objects, whether the **eventlog**
+or **logger** section type is used, provide a :meth:`~.reopen`
+method which may be called to close any log files and re-open them.
+This is useful when using a UNIX signal to effect log file
+rotation: the signal handler can call this method, and not have to
+worry about what handlers have been registered for the logger. There
+is also a function in the
+:mod:`ZConfig.components.logger.loghandler` module that re-opens all
+open log files created using ZConfig configuration:
+
+.. py:function:: ZConfig.components.logger.loghandler.reopenFiles()
+
+ Closes and re-opens all the log files held open by handlers created
+ by the factories for ``logfile`` sections. This is intended to
+ help support log rotation for applications.
+
+.. _using-logging:
+
+Using The Logging Components
+============================
+
+Building an application that uses the logging components is fairly
+straightforward. The schema needs to import the relevant components
+and declare their use::
+
+
+ <schema>
+ <import package="ZConfig.components.logger" file="eventlog.xml"/>
+ <import package="ZConfig.components.logger" file="handlers.xml"/>
+
+ <section type="eventlog" name="*" attribute="eventlog"
+ required="yes"/>
+ </schema>
+
+
+In the application, the schema and configuration file should be loaded
+normally. Once the configuration object is available, the logger
+factory should be called to configure Python's :mod:`logging` package:
+
+.. code-block:: python
+
+
+ import os
+ import ZConfig
+
+ def run(configfile):
+ schemafile = os.path.join(os.path.dirname(__file__), "schema.xml")
+ schema = ZConfig.loadSchema(schemafile)
+ config, handlers = ZConfig.loadConfig(schema, configfile)
+
+ # configure the logging package:
+ config.eventlog()
+
+ # now do interesting things
+
+
+An example configuration file for this application may look like this::
+
+
+ <eventlog>
+ level info
+
+ <logfile>
+ path /var/log/myapp
+ format %(asctime)s %(levelname)s %(name)s %(message)s
+ # locale-specific date/time representation
+ dateformat %c
+ </logfile>
+
+ <syslog>
+ level error
+ address syslog.example.net:514
+ format %(levelname)s %(name)s %(message)s
+ </syslog>
+ </eventlog>
+
+
+Refer to the :class:`logging.LogRecord` documentation for the names
+available in the message format strings (the ``format`` key in the
+log handlers). The date format strings (the ``dateformat`` key in
+the log handlers) are the same as those accepted by the
+:func:`time.strftime` function.
+
+Configuring The Logging Components
+==================================
+
+For reference documentation on the available handlers, see :ref:`logging-handlers`.
diff --git a/doc/root-and-child-config.conf b/doc/root-and-child-config.conf
new file mode 100644
index 0000000..531b755
--- /dev/null
+++ b/doc/root-and-child-config.conf
@@ -0,0 +1,11 @@
+<logger>
+ level INFO
+ <logfile>
+ path STDOUT
+ format %(levelname)s %(name)s %(message)s
+ </logfile>
+</logger>
+<logger>
+ name my.package
+ level DEBUG
+</logger>
diff --git a/doc/simple-root-config.conf b/doc/simple-root-config.conf
new file mode 100644
index 0000000..02a5461
--- /dev/null
+++ b/doc/simple-root-config.conf
@@ -0,0 +1,7 @@
+<logger>
+ level INFO
+ <logfile>
+ path STDOUT
+ format %(levelname)s %(name)s %(message)s
+ </logfile>
+</logger>
diff --git a/doc/standard-components.rst b/doc/standard-components.rst
index ae36847..2eb1240 100644
--- a/doc/standard-components.rst
+++ b/doc/standard-components.rst
@@ -5,7 +5,7 @@
===========================================
:mod:`ZConfig` provides a few convenient schema components as part
-of the package. These may be used directly or can server as examples
+of the package. These may be used directly or can serve as examples
for creating new components.
.. highlight:: xml
@@ -108,174 +108,3 @@ then we can use this as the key type for a derived mapping type::
type="email-users"
attribute="email_users"
/>
-
-
-ZConfig.components.logger
-=========================
-
-The :mod:`ZConfig.components.logger` package provides configuration
-support for the :mod:`logging` package in Python's standard library.
-This component can be imported using::
-
-
- <import package="ZConfig.components.logger"/>
-
-
-This component defines two abstract types and several concrete section
-types. These can be imported as a unit, as above, or as four smaller
-components usable in creating alternate logging packages.
-
-The first of the four smaller components contains the abstract types,
-and can be imported using::
-
-
- <import package="ZConfig.components.logger" file="abstract.xml"/>
-
-
-The two abstract types imported by this are:
-
-
-**ZConfig.logger.log**
- Logger objects are represented by this abstract type.
-
-**ZConfig.logger.handler**
- Each logger object can have one or more "handlers" associated with
- them. These handlers are responsible for writing logging events to
- some form of output stream using appropriate formatting. The output
- stream may be a file on a disk, a socket communicating with a server
- on another system, or a series of ``syslog`` messages. Section
- types which implement this type represent these handlers.
-
-
-The second and third of the smaller components provides section types
-that act as factories for :class:`logging.Logger` objects. These can be
-imported using::
-
-
- <import package="ZConfig.components.logger" file="eventlog.xml"/>
- <import package="ZConfig.components.logger" file="logger.xml"/>
-
-The types defined in these components implement the
-**ZConfig.logger.log** abstract type. The 'eventlog.xml'
-component defines an **eventlog** type which represents the
-root logger from the the :mod:`logging` package (the return value of
-:func:`logging.getLogger`), while the 'logger.xml' component
-defines a **logger** section type which represents a named
-logger.
-
-
-The third of the smaller components provides section types that are
-factories for :class:`logging.Handler` objects. This can be imported
-using::
-
-
- <import package="ZConfig.components.logger" file="handlers.xml"/>
-
-
-The types defined in this component implement the
-**ZConfig.logger.handler** abstract type.
-
-
-The configuration objects provided by both the logger and handler
-types are factories for the finished loggers and handlers. These
-factories should be called with no arguments to retrieve the logger or
-log handler objects. Calling the factories repeatedly will cause the
-same objects to be returned each time, so it's safe to simply call
-them to retrieve the objects.
-
-The factories for the logger objects, whether the **eventlog**
-or **logger** section type is used, provide a :meth:`~.reopen`
-method which may be called to close any log files and re-open them.
-This is useful when using a UNIX signal to effect log file
-rotation: the signal handler can call this method, and not have to
-worry about what handlers have been registered for the logger. There
-is also a function in the
-:mod:`ZConfig.components.logger.loghandler` module that re-opens all
-open log files created using ZConfig configuration:
-
-.. py:function:: ZConfig.components.logger.loghandler.reopenFiles()
-
- Closes and re-opens all the log files held open by handlers created
- by the factories for ``logfile`` sections. This is intended to
- help support log rotation for applications.
-
-.. _using-logging:
-
-Using The Logging Components
-----------------------------
-
-Building an application that uses the logging components is fairly
-straightforward. The schema needs to import the relevant components
-and declare their use::
-
-
- <schema>
- <import package="ZConfig.components.logger" file="eventlog.xml"/>
- <import package="ZConfig.components.logger" file="handlers.xml"/>
-
- <section type="eventlog" name="*" attribute="eventlog"
- required="yes"/>
- </schema>
-
-
-In the application, the schema and configuration file should be loaded
-normally. Once the configuration object is available, the logger
-factory should be called to configure Python's :mod:`logging` package:
-
-.. code-block:: python
-
-
- import os
- import ZConfig
-
- def run(configfile):
- schemafile = os.path.join(os.path.dirname(__file__), "schema.xml")
- schema = ZConfig.loadSchema(schemafile)
- config, handlers = ZConfig.loadConfig(schema, configfile)
-
- # configure the logging package:
- config.eventlog()
-
- # now do interesting things
-
-
-An example configuration file for this application may look like this::
-
-
- <eventlog>
- level info
-
- <logfile>
- path /var/log/myapp
- format %(asctime)s %(levelname)s %(name)s %(message)s
- # locale-specific date/time representation
- dateformat %c
- </logfile>
-
- <syslog>
- level error
- address syslog.example.net:514
- format %(levelname)s %(name)s %(message)s
- </syslog>
- </eventlog>
-
-
-Refer to the :class:`logging.LogRecord` documentation for the names
-available in the message format strings (the ``format`` key in the
-log handlers). The date format strings (the ``dateformat`` key in
-the log handlers) are the same as those accepted by the
-:func:`time.strftime` function.
-
-Configuring the email logger
-----------------------------
-
-ZConfig has support for Python's :class:`logging.handlers.SMTPHandler`
-via the ``<email-notifier>`` handler.
-
-.. zconfig:: ZConfig.components.logger
- :file: handlers.xml
- :members: email-notifier
-
-
-For details about the :class:`~logging.handlers.SMTPHandler` see the
-Python :mod:`logging` module.
diff --git a/doc/using-logging.rst b/doc/using-logging.rst
new file mode 100644
index 0000000..c24a2c1
--- /dev/null
+++ b/doc/using-logging.rst
@@ -0,0 +1,147 @@
+=====================
+ Configuring Logging
+=====================
+
+One common use of ZConfig is to configure the Python :mod:`logging`
+framework. ZConfig provides one simple convenience function to do this:
+
+.. autofunction:: ZConfig.configureLoggers
+
+
+Suppose we have the following logging configuration in a file called ``simple-root-config.conf``:
+
+.. literalinclude:: simple-root-config.conf
+ :language: xml
+
+We can load this file and pass its contents to ``configureLoggers``::
+
+ >>> from ZConfig import configureLoggers
+ >>> with open('simple-root-config.conf') as f:
+ ... configureLoggers(f.read())
+
+When this returns, the root logger is configured to output messages
+logged at INFO or above to the console, as we can see in the following
+example::
+
+ >>> from logging import getLogger
+ >>> getLogger().info('An info message')
+ INFO root An info message
+ >>> getLogger().debug('A debug message')
+
+A more common configuration would see STDOUT replaced with a path to
+the file into which log entries would be written.
+
+Although loading configuration from a file is common, we could of
+course also pass a string literal to :func:`~.configureLoggers`. Any
+type of Python string (bytes or unicode) is acceptable.
+
+
+Configuration Format
+====================
+
+The configuration text is in the :ref:`ZConfig format <syntax>` and
+supports comments and substitutions.
+
+It can contain multiple ``<logger>`` elements,
+each of which can have any number of :ref:`handler elements <logging-handlers>`.
+
+.. zconfig:: ZConfig.components.logger
+ :file: logger.xml
+ :members: logger
+
+.. highlight:: xml
+
+Examples
+--------
+
+Here's the configuration we looked at above. It configures the root
+(unnamed) logger with one handler (``<logfile>``), operating at the INFO level:
+
+.. literalinclude:: simple-root-config.conf
+ :language: xml
+
+We can configure a different logger in the hierarchy to use the DEBUG
+level at the same time as we configure the root logger. We're not
+specifying a handler for it, but the default ``propagate`` value will
+let the lower level logger use the root logger's handler:
+
+.. literalinclude:: root-and-child-config.conf
+ :language: xml
+
+
+If we load that configuration from ``root-and-child-config.conf``, we
+can expect this behaviour:
+
+..
+ >>> tearDown(None)
+
+.. code-block:: pycon
+
+ >>> with open('root-and-child-config.conf') as f:
+ ... configureLoggers(f.read())
+ >>> getLogger().info('An info message')
+ INFO root An info message
+ >>> getLogger().debug('A debug message')
+ >>> getLogger('my.package').debug('A debug message')
+ DEBUG my.package A debug message
+
+.. _logging-handlers:
+
+Log Handlers
+============
+
+Many of Python's built-in log handlers can be configured with ZConfig.
+
+
+Files
+-----
+
+The ``<logfile>`` handler writes to files or standard output or standard error
+(when the ``path`` is ``STDOUT`` or ``STDERR`` respectively). It
+configures a :class:`logging.StreamHandler`. When the
+``interval`` or ``max-size`` attributes are set, the files on disk
+will be rotated either at :class:`set intervals
+<logging.handlers.TimedRotatingFileHandler>` or when files
+:class:`reach the set size <logging.handlers.RotatingFileHandler>`, respectively.
+
+.. zconfig:: ZConfig.components.logger
+ :file: handlers.xml
+ :members: logfile
+
+The System Log
+--------------
+
+The ``<syslog>`` handler configures the :class:`logging.handlers.SysLogHandler`.
+
+.. zconfig:: ZConfig.components.logger
+ :file: handlers.xml
+ :members: syslog
+
+Windows Event Log
+-----------------
+
+On Windows, the ``<win32-eventlog>`` configures the :class:`logging.handlers.NTEventLogHandler`.
+
+.. zconfig:: ZConfig.components.logger
+ :file: handlers.xml
+ :members: win32-eventlog
+
+HTTP
+----
+
+The ``<<http-logger>`` element configures :class:`logging.handlers.HTTPHandler`.
+
+.. zconfig:: ZConfig.components.logger
+ :file: handlers.xml
+ :members: http-logger
+
+
+Email
+-----
+
+ZConfig has support for Python's :class:`logging.handlers.SMTPHandler`
+via the ``<email-notifier>`` handler.
+
+.. zconfig:: ZConfig.components.logger
+ :file: handlers.xml
+ :members: email-notifier
diff --git a/doc/using-zconfig.rst b/doc/using-zconfig.rst
index 8e68d12..ed04ef6 100644
--- a/doc/using-zconfig.rst
+++ b/doc/using-zconfig.rst
@@ -11,6 +11,9 @@ Reading Configurations
For information on using ZConfig configuration documents in Python,
see :mod:`ZConfig` and especially the example at :ref:`basic-usage`.
+For information about configuring the :mod:`logging` framework, see
+:doc:`using-logging`.
+
.. _syntax:
Writing Configurations
@@ -95,8 +98,9 @@ The terminator looks like this:
The configuration data in a non-empty section consists of a sequence
-of one or more key-value pairs and sections. For example::
+of one or more key-value pairs and sections. For example:
+.. code-block:: xml
<my-section>
key-1 value-1
@@ -164,7 +168,9 @@ the same way Zope 2 does. There are some parameters which configure
the general behavior of the logging mechanism, and an arbitrary number
of **log handlers** may be specified to control how the log
messages are handled. Several log handlers are provided by the
-application. Here is an example logging configuration::
+application. Here is an example logging configuration:
+
+.. code-block:: xml
<eventlog>
| Document logging syntax
IMO, the best way to configure Python logging is with ZConfig, but it's not documented and thus a best-kept secret.
It would be great if this got documented. | zopefoundation/ZConfig | diff --git a/ZConfig/tests/test_readme.py b/ZConfig/tests/test_readme.py
index a4ed92b..2a95b6a 100644
--- a/ZConfig/tests/test_readme.py
+++ b/ZConfig/tests/test_readme.py
@@ -12,6 +12,9 @@
#
##############################################################################
import doctest
+import os
+import os.path
+import unittest
import logging
@@ -19,7 +22,6 @@ options = doctest.REPORT_NDIFF | doctest.ELLIPSIS
old = {}
def setUp(test):
- global old
logger = logging.getLogger()
old['level'] = logger.level
old['handlers'] = logger.handlers[:]
@@ -29,9 +31,33 @@ def tearDown(test):
logger.level = old['level']
logger.handlers = old['handlers']
+def docSetUp(test):
+ old['pwd'] = os.getcwd()
+ doc_path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ '..',
+ '..',
+ 'doc')
+ os.chdir(doc_path)
+ setUp(test)
+
+def docTearDown(test):
+ os.chdir(old['pwd'])
+ tearDown(test)
+
def test_suite():
- return doctest.DocFileSuite(
- '../../README.rst',
- optionflags=options,
- setUp=setUp, tearDown=tearDown,
- )
+ return unittest.TestSuite([
+ doctest.DocFileSuite(
+ '../../README.rst',
+ optionflags=options,
+ setUp=setUp, tearDown=tearDown,
+ ),
+ doctest.DocFileSuite(
+ '../../doc/using-logging.rst',
+ optionflags=options, globs=globals(),
+ setUp=docSetUp, tearDown=docTearDown,
+ ),
+ ])
+
+if __name__ == '__main__':
+ unittest.main(defaultTest='test_suite')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 5
} | 3.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"zope.testrunner",
"pytest"
],
"pre_install": [],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
docutils==0.18.1
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
-e git+https://github.com/zopefoundation/ZConfig.git@227f96029db86137ddb02f18eedce5b4fcd6d8ba#egg=ZConfig
zipp==3.6.0
zope.exceptions==4.6
zope.interface==5.5.2
zope.testrunner==5.6
| name: ZConfig
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- docutils==0.18.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
- zope-exceptions==4.6
- zope-interface==5.5.2
- zope-testrunner==5.6
prefix: /opt/conda/envs/ZConfig
| [
"ZConfig/tests/test_readme.py::test_suite"
]
| []
| []
| []
| Zope Public License 2.1 | 1,069 | [
"README.rst",
"doc/root-and-child-config.conf",
"doc/simple-root-config.conf",
"doc/using-logging.rst",
"doc/standard-components.rst",
"doc/developing-with-zconfig.rst",
"doc/index.rst",
"doc/logging-components.rst",
"doc/using-zconfig.rst"
]
| [
"README.rst",
"doc/root-and-child-config.conf",
"doc/simple-root-config.conf",
"doc/using-logging.rst",
"doc/standard-components.rst",
"doc/developing-with-zconfig.rst",
"doc/index.rst",
"doc/logging-components.rst",
"doc/using-zconfig.rst"
]
|
|
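A standalone sketch of the `ZConfig.configureLoggers` usage documented in the ZConfig record above; the configuration string mirrors the `simple-root-config.conf` file added by that patch:

```python
import logging
import ZConfig

LOGGING_CONFIG = '''
<logger>
  level INFO
  <logfile>
    path STDOUT
    format %(levelname)s %(name)s %(message)s
  </logfile>
</logger>
'''

# Configures the root logger with a stream handler writing to stdout.
ZConfig.configureLoggers(LOGGING_CONFIG)

logging.getLogger().info('An info message')   # printed: INFO root An info message
logging.getLogger().debug('A debug message')  # suppressed at the INFO level
```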
eyeseast__python-frontmatter-34 | d9b7df7bb3902530f37d9ac5ccaf04e4f3f36f87 | 2017-03-08 16:19:24 | 39ac3a6852ccfd0b9c231f8e187243babc344661 | diff --git a/frontmatter/__init__.py b/frontmatter/__init__.py
index 0cb7e4d..92f7c97 100644
--- a/frontmatter/__init__.py
+++ b/frontmatter/__init__.py
@@ -180,7 +180,7 @@ def dumps(post, handler=None, **kwargs):
"""
if handler is None:
- handler = post.handler or YAMLHandler()
+ handler = getattr(post, 'handler', None) or YAMLHandler()
start_delimiter = kwargs.pop('start_delimiter', handler.START_DELIMITER)
end_delimiter = kwargs.pop('end_delimiter', handler.END_DELIMITER)
| Deal with no handler on post object
This bug: https://github.com/datadesk/django-bigbuild/issues/38
In cases where the post object has no `handler` attribute at all (not just one set to `None`), fall back to `YAMLHandler`. | eyeseast/python-frontmatter | diff --git a/test.py b/test.py
index 92a1a38..21d3976 100644
--- a/test.py
+++ b/test.py
@@ -139,6 +139,15 @@ class HandlerTest(unittest.TestCase):
format = frontmatter.detect_format(f.read(), frontmatter.handlers)
self.assertIsInstance(format, Handler)
+ def test_no_handler(self):
+ "default to YAMLHandler when no handler is attached"
+ post = frontmatter.load('tests/hello-world.markdown')
+ del post.handler
+
+ text = frontmatter.dumps(post)
+ self.assertIsInstance(
+ frontmatter.detect_format(text, frontmatter.handlers),
+ YAMLHandler)
def test_custom_handler(self):
"allow caller to specify a custom delimiter/handler"
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
-e git+https://github.com/eyeseast/python-frontmatter.git@d9b7df7bb3902530f37d9ac5ccaf04e4f3f36f87#egg=python_frontmatter
PyYAML==6.0.1
six==1.17.0
toml==0.10.2
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-frontmatter
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-frontmatter
| [
"test.py::HandlerTest::test_no_handler"
]
| []
| [
"test.py::FrontmatterTest::test_all_the_tests",
"test.py::FrontmatterTest::test_dumping_with_custom_delimiters",
"test.py::FrontmatterTest::test_empty_frontmatter",
"test.py::FrontmatterTest::test_no_frontmatter",
"test.py::FrontmatterTest::test_pretty_dumping",
"test.py::FrontmatterTest::test_to_dict",
"test.py::FrontmatterTest::test_to_string",
"test.py::FrontmatterTest::test_unicode_post",
"test.py::FrontmatterTest::test_with_crlf_string",
"test.py::FrontmatterTest::test_with_markdown_content",
"test.py::HandlerTest::test_custom_handler",
"test.py::HandlerTest::test_detect_format",
"test.py::HandlerTest::test_json",
"test.py::HandlerTest::test_toml"
]
| []
| MIT License | 1,070 | [
"frontmatter/__init__.py"
]
| [
"frontmatter/__init__.py"
]
|
|
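A tiny illustration of why the frontmatter fix above uses `getattr` rather than a plain attribute access; the classes below are stand-ins, not frontmatter's own:

```python
class YAMLHandlerStandIn:
    """Placeholder for the default handler the real code falls back to."""

class Post:
    """Stand-in post object with no 'handler' attribute at all."""

post = Post()

# post.handler or YAMLHandlerStandIn() would raise AttributeError here,
# because the attribute does not exist (it is not merely set to None).
handler = getattr(post, 'handler', None) or YAMLHandlerStandIn()
print(type(handler).__name__)  # -> YAMLHandlerStandIn
```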
google__mobly-140 | b4362eda0c8148644812849cdb9c741e35d5750d | 2017-03-09 06:28:28 | b4362eda0c8148644812849cdb9c741e35d5750d | diff --git a/mobly/controllers/android_device_lib/callback_handler.py b/mobly/controllers/android_device_lib/callback_handler.py
index 7c648c2..ddcf029 100644
--- a/mobly/controllers/android_device_lib/callback_handler.py
+++ b/mobly/controllers/android_device_lib/callback_handler.py
@@ -18,6 +18,11 @@ import time
from mobly.controllers.android_device_lib import snippet_event
+# The max timeout cannot be larger than the max time the socket waits for a
+# response message. Otherwise, the socket would timeout before the Rpc call
+# does, leaving both server and client in unknown states.
+MAX_TIMEOUT = 60 * 10
+
class Error(Exception):
pass
@@ -68,9 +73,15 @@ class CallbackHandler(object):
SnippetEvent, the oldest entry of the specified event.
Raises:
+ Error: If the specified timeout is longer than the max timeout
+ supported.
TimeoutError: The expected event does not occur within time limit.
"""
if timeout:
+ if timeout > MAX_TIMEOUT:
+ raise Error(
+ 'Specified timeout %s is longer than max timeout %s.' %
+ (timeout, MAX_TIMEOUT))
timeout *= 1000 # convert to milliseconds for java side
try:
raw_event = self._event_client.eventWaitAndGet(self._id,
@@ -78,8 +89,8 @@ class CallbackHandler(object):
except Exception as e:
if 'EventSnippetException: timeout.' in str(e):
raise TimeoutError(
- 'Timeout waiting for event "%s" triggered by %s (%s).'
- % (event_name, self._method_name, self._id))
+ 'Timeout waiting for event "%s" triggered by %s (%s).' %
+ (event_name, self._method_name, self._id))
raise
return snippet_event.from_dict(raw_event)
diff --git a/mobly/controllers/android_device_lib/jsonrpc_client_base.py b/mobly/controllers/android_device_lib/jsonrpc_client_base.py
index bb4b5ed..5b661a3 100644
--- a/mobly/controllers/android_device_lib/jsonrpc_client_base.py
+++ b/mobly/controllers/android_device_lib/jsonrpc_client_base.py
@@ -53,7 +53,10 @@ APP_START_WAIT_TIME = 15
UNKNOWN_UID = -1
# Maximum time to wait for the socket to open on the device.
-_SOCKET_TIMEOUT = 60
+_SOCKET_CONNECTION_TIMEOUT = 60
+
+# Maximum time to wait for a response message on the socket.
+_SOCKET_READ_TIMEOUT = callback_handler.MAX_TIMEOUT
class Error(Exception):
@@ -70,9 +73,9 @@ class ApiError(Error):
class ProtocolError(Error):
"""Raised when there is some error in exchanging data with server."""
- NO_RESPONSE_FROM_HANDSHAKE = "No response from handshake."
- NO_RESPONSE_FROM_SERVER = "No response from server."
- MISMATCHED_API_ID = "Mismatched API id."
+ NO_RESPONSE_FROM_HANDSHAKE = 'No response from handshake.'
+ NO_RESPONSE_FROM_SERVER = 'No response from server.'
+ MISMATCHED_API_ID = 'Mismatched API id.'
class JsonRpcCommand(object):
@@ -186,9 +189,9 @@ class JsonRpcClientBase(object):
"""Opens a connection to a JSON RPC server.
Opens a connection to a remote client. The connection attempt will time
- out if it takes longer than _SOCKET_TIMEOUT seconds. Each subsequent
- operation over this socket will time out after _SOCKET_TIMEOUT seconds
- as well.
+ out if it takes longer than _SOCKET_CONNECTION_TIMEOUT seconds. Each
+ subsequent operation over this socket will time out after
+ _SOCKET_READ_TIMEOUT seconds as well.
Args:
uid: int, The uid of the session to join, or UNKNOWN_UID to start a
@@ -202,8 +205,8 @@ class JsonRpcClientBase(object):
"""
self._counter = self._id_counter()
self._conn = socket.create_connection(('127.0.0.1', self.host_port),
- _SOCKET_TIMEOUT)
- self._conn.settimeout(_SOCKET_TIMEOUT)
+ _SOCKET_CONNECTION_TIMEOUT)
+ self._conn.settimeout(_SOCKET_READ_TIMEOUT)
self._client = self._conn.makefile(mode='brw')
resp = self._cmd(cmd, uid)
diff --git a/mobly/controllers/android_device_lib/sl4a_client.py b/mobly/controllers/android_device_lib/sl4a_client.py
index ef5583e..ba31e0f 100644
--- a/mobly/controllers/android_device_lib/sl4a_client.py
+++ b/mobly/controllers/android_device_lib/sl4a_client.py
@@ -57,4 +57,4 @@ class Sl4aClient(jsonrpc_client_base.JsonRpcClientBase):
"pm list package | grep com.googlecode.android_scripting"):
raise jsonrpc_client_base.AppStartError(
'%s is not installed on %s' % (
- self.app_name, self._adb.getprop('ro.boot.serialno')))
+ self.app_name, self._adb.serial))
diff --git a/mobly/controllers/android_device_lib/snippet_client.py b/mobly/controllers/android_device_lib/snippet_client.py
index 7c3ca01..793e3ae 100644
--- a/mobly/controllers/android_device_lib/snippet_client.py
+++ b/mobly/controllers/android_device_lib/snippet_client.py
@@ -56,8 +56,8 @@ class SnippetClient(jsonrpc_client_base.JsonRpcClientBase):
app_name=package,
adb_proxy=adb_proxy)
self.package = package
- self._serial = self._adb.getprop('ro.boot.serialno')
self.log = log
+ self._serial = self._adb.serial
def _do_start_app(self):
"""Overrides superclass."""
| Rpc socket times out before EventHandler.waitAndGet
`json_rpc_base._SOCKET_TIMEOUT` is 60s. Yet `EventHandler.waitAndGet` can take an arbitrary timeout length.
`EventHandler.waitAndGet` makes a call to `EventSnippet.eventWaitAndGet` through the socket, which doesn't return until the event is received or the method itself times out.
So if somebody calls `EventHandler.waitAndGet` with a timeout longer than `json_rpc_base._SOCKET_TIMEOUT`, we'd get a socket timeout error.
We should:
1. Make socket timeout longer
2. Add a guard in `EventHandler.waitAndGet` for a max timeout value. | google/mobly | diff --git a/tests/mobly/controllers/android_device_lib/callback_handler_test.py b/tests/mobly/controllers/android_device_lib/callback_handler_test.py
index d121fe8..1a3afc0 100755
--- a/tests/mobly/controllers/android_device_lib/callback_handler_test.py
+++ b/tests/mobly/controllers/android_device_lib/callback_handler_test.py
@@ -37,6 +37,10 @@ class CallbackHandlerTest(unittest.TestCase):
"""Unit tests for mobly.controllers.android_device_lib.callback_handler.
"""
+ def test_timeout_value(self):
+ self.assertGreaterEqual(jsonrpc_client_base._SOCKET_READ_TIMEOUT,
+ callback_handler.MAX_TIMEOUT)
+
def test_event_dict_to_snippet_event(self):
mock_event_client = mock.Mock()
mock_event_client.eventWaitAndGet = mock.Mock(
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 4
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y adb"
],
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
future==1.0.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/google/mobly.git@b4362eda0c8148644812849cdb9c741e35d5750d#egg=mobly
mock==1.0.1
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
PyYAML==6.0.1
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- future==1.0.0
- mock==1.0.1
- pytz==2025.2
- pyyaml==6.0.1
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/controllers/android_device_lib/callback_handler_test.py::CallbackHandlerTest::test_timeout_value"
]
| []
| [
"tests/mobly/controllers/android_device_lib/callback_handler_test.py::CallbackHandlerTest::test_event_dict_to_snippet_event",
"tests/mobly/controllers/android_device_lib/callback_handler_test.py::CallbackHandlerTest::test_wait_and_get_timeout"
]
| []
| Apache License 2.0 | 1,071 | [
"mobly/controllers/android_device_lib/sl4a_client.py",
"mobly/controllers/android_device_lib/snippet_client.py",
"mobly/controllers/android_device_lib/jsonrpc_client_base.py",
"mobly/controllers/android_device_lib/callback_handler.py"
]
| [
"mobly/controllers/android_device_lib/sl4a_client.py",
"mobly/controllers/android_device_lib/snippet_client.py",
"mobly/controllers/android_device_lib/jsonrpc_client_base.py",
"mobly/controllers/android_device_lib/callback_handler.py"
]
|
|
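A minimal sketch of the timeout guard described in the mobly record above; the function and the fake RPC callable are illustrative stand-ins, not mobly's actual API:

```python
MAX_TIMEOUT = 60 * 10              # seconds; largest wait a caller may request
SOCKET_READ_TIMEOUT = MAX_TIMEOUT  # the socket must not give up before the RPC does

def wait_and_get(rpc_wait, event_name, timeout=None):
    """Reject timeouts the underlying transport could not survive."""
    if timeout is not None:
        if timeout > MAX_TIMEOUT:
            raise ValueError('Specified timeout %s is longer than max timeout %s.'
                             % (timeout, MAX_TIMEOUT))
        timeout *= 1000            # the device side expects milliseconds
    return rpc_wait(event_name, timeout)

# rpc_wait stands in for the eventWaitAndGet RPC issued over the socket.
print(wait_and_get(lambda name, t: (name, t), 'onDone', timeout=30))
```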
zalando-stups__zign-47 | fec2ac288a3d4ba1d5082e8daa2feea0e1eb9ec1 | 2017-03-09 08:16:54 | 1016b1a7867e48c671bfe70fe9e7ce26b96107ea | diff --git a/zign/cli.py b/zign/cli.py
index 2fabfb2..b6cd3e0 100644
--- a/zign/cli.py
+++ b/zign/cli.py
@@ -87,7 +87,8 @@ def token(name, authorize_url, client_id, business_partner_id, refresh):
'''Create a new Platform IAM token or use an existing one.'''
try:
- token = get_token_implicit_flow(name, authorize_url, client_id, business_partner_id, refresh)
+ token = get_token_implicit_flow(name, authorize_url=authorize_url, client_id=client_id,
+ business_partner_id=business_partner_id, refresh=refresh)
except AuthenticationFailed as e:
raise click.UsageError(e)
access_token = token.get('access_token')
| --refresh doesn't do anything useful
The `--refresh` flag doesn't do anything for named tokens.
```
$ ztoken token --refresh -n postman | md5sum
34e6b2a7a14439271f077690eba31fa3 -
$ ztoken token --refresh -n postman | md5sum
34e6b2a7a14439271f077690eba31fa3 -
```
I expected this to request a new token using the existing refresh token.
Interestingly it also does weird things when requesting an unnamed token (`ztoken token --refresh`). Then a browser window is always opened and after successful authentication you get
```
{
"error": "invalid_client",
"error_description": "invalid client"
}
```
The reason seems to be that the URL is constructed incorrectly. It points to `.../oauth2/authorize?business_partner_id=True&client_id=ztoken&redirect_uri=http://localhost:8081&response_type=token` (note `True` ending up as the business_partner_id).
---
There is also a more general inconsistency in when new tokens are created. Creating an unnamed token records only the refresh token, not the access token or its lifetime, so every run of `ztoken` has to request a new token and `--refresh` would be pointless there. Creating a named token records the lifetime and seems to return the same token every time as long as it is still valid. Even if the `--refresh` flag were fixed, this dichotomy would not go away. | zalando-stups/zign | diff --git a/tests/test_cli.py b/tests/test_cli.py
new file mode 100644
index 0000000..8b23802
--- /dev/null
+++ b/tests/test_cli.py
@@ -0,0 +1,19 @@
+from click.testing import CliRunner
+from unittest.mock import MagicMock
+from zign.cli import cli
+
+
+def test_token(monkeypatch):
+ token = 'abc-123'
+
+ get_token_implicit_flow = MagicMock()
+ get_token_implicit_flow.return_value = {'access_token': token, 'expires_in': 1, 'token_type': 'test'}
+ monkeypatch.setattr('zign.cli.get_token_implicit_flow', get_token_implicit_flow)
+
+ runner = CliRunner()
+
+ with runner.isolated_filesystem():
+ result = runner.invoke(cli, ['token', '-n', 'mytok', '--refresh'], catch_exceptions=False)
+
+ assert token == result.output.rstrip().split('\n')[-1]
+ get_token_implicit_flow.assert_called_with('mytok', authorize_url=None, business_partner_id=None, client_id=None, refresh=True)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
clickclick==20.10.2
coverage==7.8.0
dnspython==2.7.0
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
pytest-cov==6.0.0
PyYAML==6.0.2
requests==2.32.3
stups-cli-support==1.1.22
stups-tokens==1.1.19
-e git+https://github.com/zalando-stups/zign.git@fec2ac288a3d4ba1d5082e8daa2feea0e1eb9ec1#egg=stups_zign
tomli==2.2.1
urllib3==2.3.0
| name: zign
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- clickclick==20.10.2
- coverage==7.8.0
- dnspython==2.7.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pytest-cov==6.0.0
- pyyaml==6.0.2
- requests==2.32.3
- stups-cli-support==1.1.22
- stups-tokens==1.1.19
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/zign
| [
"tests/test_cli.py::test_token"
]
| []
| []
| []
| Apache License 2.0 | 1,072 | [
"zign/cli.py"
]
| [
"zign/cli.py"
]
|
|
watson-developer-cloud__python-sdk-175 | b35391e81bc3c3831239a6da8e9461bf6f8083ac | 2017-03-09 13:46:44 | f6d0528f9369a1042372f92d37a67e3acce124eb | diff --git a/watson_developer_cloud/discovery_v1.py b/watson_developer_cloud/discovery_v1.py
index 0c7662ab..a4c88fc2 100644
--- a/watson_developer_cloud/discovery_v1.py
+++ b/watson_developer_cloud/discovery_v1.py
@@ -78,8 +78,8 @@ class DiscoveryV1(WatsonDeveloperCloudService):
:return:
"""
self._valid_name_and_description(name=name, description=description)
- if size not in range(1, 4):
- raise ValueError("Size can be 1, 2, or 3")
+ if size not in range(0, 4):
+ raise ValueError("Size can be 0, 1, 2, or 3")
body = json.dumps({"name": name,
"description": description,
| Discovery create_environment should allow size=0
https://github.com/watson-developer-cloud/python-sdk/blob/master/watson_developer_cloud/discovery_v1.py#L77
The actual API allows size=0 for the free tier. | watson-developer-cloud/python-sdk | diff --git a/test/test_discovery_v1.py b/test/test_discovery_v1.py
index 90408da6..dcc06e60 100644
--- a/test/test_discovery_v1.py
+++ b/test/test_discovery_v1.py
@@ -2,6 +2,7 @@ import responses
import os
import json
import watson_developer_cloud
+import pytest
try:
from urllib.parse import urlparse, urljoin
except ImportError:
@@ -112,14 +113,12 @@ def test_create_environment():
assert thrown
- try:
+ with pytest.raises(ValueError):
discovery.create_environment(size=14)
- except ValueError as ve:
- thrown = True
- assert str(ve) == "Size can be 1, 2, or 3"
- assert thrown
- assert len(responses.calls) == 2
+ discovery.create_environment(size=0)
+
+ assert len(responses.calls) == 3
@responses.activate
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.25 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest>=2.8.2",
"responses>=0.4.0",
"python_dotenv>=0.1.5",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pysolr==3.10.0
pytest==7.0.1
python-dotenv==0.20.0
requests==2.27.1
responses==0.17.0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
-e git+https://github.com/watson-developer-cloud/python-sdk.git@b35391e81bc3c3831239a6da8e9461bf6f8083ac#egg=watson_developer_cloud
zipp==3.6.0
| name: python-sdk
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==22.2.0
- charset-normalizer==2.0.12
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pysolr==3.10.0
- pytest==7.0.1
- python-dotenv==0.20.0
- requests==2.27.1
- responses==0.17.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/python-sdk
| [
"test/test_discovery_v1.py::test_create_environment"
]
| []
| [
"test/test_discovery_v1.py::test_environments",
"test/test_discovery_v1.py::test_get_environment",
"test/test_discovery_v1.py::test_update_environment",
"test/test_discovery_v1.py::test_delete_environment",
"test/test_discovery_v1.py::test_collections",
"test/test_discovery_v1.py::test_collection",
"test/test_discovery_v1.py::test_query",
"test/test_discovery_v1.py::test_configs",
"test/test_discovery_v1.py::test_empty_configs",
"test/test_discovery_v1.py::test_no_configs",
"test/test_discovery_v1.py::test_document"
]
| []
| Apache License 2.0 | 1,073 | [
"watson_developer_cloud/discovery_v1.py"
]
| [
"watson_developer_cloud/discovery_v1.py"
]
|
|
openfoodfacts__openfoodfacts-python-26 | 0858f6688ca51c0064aa1891b342a441680bf5c9 | 2017-03-09 19:10:49 | 0858f6688ca51c0064aa1891b342a441680bf5c9 | diff --git a/README.md b/README.md
index 20ee03d..218f642 100644
--- a/README.md
+++ b/README.md
@@ -137,6 +137,8 @@ states = openfoodfacts.facets.get_states()
*Get all products for given facets.*
+Page access (pagination) is available through parameters.
+
```python
products = openfoodfacts.products.get_by_facets({
'trace': 'egg',
@@ -147,7 +149,7 @@ products = openfoodfacts.products.get_by_facets({
*Get all products for given additive.*
```python
-products = openfoodfacts.products.get_by_additive(additive)
+products = openfoodfacts.products.get_by_additive(additive, page=1)
```
*Get all products for given allergen.*
diff --git a/openfoodfacts/__init__.py b/openfoodfacts/__init__.py
index b9cef3b..6ad94e6 100644
--- a/openfoodfacts/__init__.py
+++ b/openfoodfacts/__init__.py
@@ -37,8 +37,8 @@ def add_by_facet_fetch_function(facet):
else:
facet = facet[:-1]
- def func(facet_id):
- return utils.fetch('%s/%s' % (facet, facet_id))['products']
+ def func(facet_id, page=1):
+ return utils.fetch('%s/%s/%s' % (facet, facet_id, page))['products']
func.__name__ = "get_by_%s" % facet
setattr(products, func.__name__, func)
diff --git a/openfoodfacts/products.py b/openfoodfacts/products.py
index 9ea0327..141d59e 100644
--- a/openfoodfacts/products.py
+++ b/openfoodfacts/products.py
@@ -12,7 +12,7 @@ def get_product(barcode):
return utils.fetch('api/v0/product/%s' % barcode)
-def get_by_facets(query):
+def get_by_facets(query, page=1):
"""
Return products for a set of facets.
"""
@@ -28,7 +28,7 @@ def get_by_facets(query):
path.append(key)
path.append(query[key])
- return utils.fetch('/'.join(path))['products']
+ return utils.fetch('%s/%s' % ('/'.join(path), page))['products']
def search(query, page=1, page_size=20, sort_by='unique_scans'):
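The new `page` argument is threaded straight through to the facet JSON endpoints (`trace/egg/2.json` and so on). A short usage sketch based on the added tests:
```python
import openfoodfacts

# First page of products for a facet (page defaults to 1).
products = openfoodfacts.products.get_by_trace('egg')

# Second page of the same facet.
more_products = openfoodfacts.products.get_by_trace('egg', page=2)

# Pagination also works for combined facet queries.
french_egg_products = openfoodfacts.products.get_by_facets(
    {'trace': 'egg', 'country': 'france'}, page=1)
```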
| Travis build failing
@frankrousseau: Travis notifications are now available on Slack shortly after you commit. The build is currently broken (it looks like an encoding issue).
writing manifest file 'openfoodfacts.egg-info/SOURCES.txt'
running build_ext
Traceback (most recent call last):
File "setup.py", line 63, in <module>
extras_require={
File "/opt/python/2.7.9/lib/python2.7/distutils/core.py", line 151, in setup
dist.run_commands()
File "/opt/python/2.7.9/lib/python2.7/distutils/dist.py", line 953, in run_commands
self.run_command(cmd)
File "/opt/python/2.7.9/lib/python2.7/distutils/dist.py", line 972, in run_command
cmd_obj.run()
File "/home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages/setuptools/command/test.py", line 142, in run
self.with_project_on_sys_path(self.run_tests)
File "/home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages/setuptools/command/test.py", line 122, in with_project_on_sys_path
func()
File "/home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages/setuptools/command/test.py", line 163, in run_tests
testRunner=self._resolve_as_ep(self.test_runner),
File "/opt/python/2.7.9/lib/python2.7/unittest/main.py", line 94, in __init__
self.parseArgs(argv)
File "/opt/python/2.7.9/lib/python2.7/unittest/main.py", line 149, in parseArgs
self.createTests()
File "/opt/python/2.7.9/lib/python2.7/unittest/main.py", line 158, in createTests
self.module)
File "/opt/python/2.7.9/lib/python2.7/unittest/loader.py", line 130, in loadTestsFromNames
suites = [self.loadTestsFromName(name, module) for name in names]
File "/opt/python/2.7.9/lib/python2.7/unittest/loader.py", line 103, in loadTestsFromName
return self.loadTestsFromModule(obj)
File "/home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages/setuptools/command/test.py", line 37, in loadTestsFromModule
tests.append(self.loadTestsFromName(submodule))
File "/opt/python/2.7.9/lib/python2.7/unittest/loader.py", line 91, in loadTestsFromName
module = __import__('.'.join(parts_copy))
File "/home/travis/build/openfoodfacts/openfoodfacts-python/tests/facets_test.py", line 3, in <module>
import openfoodfacts
File "/home/travis/build/openfoodfacts/openfoodfacts-python/openfoodfacts/__init__.py", line 4, in <module>
from . import utils
File "/home/travis/build/openfoodfacts/openfoodfacts-python/openfoodfacts/utils.py", line 21
SyntaxError: Non-ASCII character '\xe2' in file /home/travis/build/openfoodfacts/openfoodfacts-python/openfoodfacts/utils.py on line 22, but no encoding declared; see http://python.org/dev/peps/pep-0263/ for details
The command "python setup.py test" exited with 1.
0.11s$ pep8 openfoodfacts/*.py
openfoodfacts/utils.py:19:80: E501 line too long (127 characters)
openfoodfacts/utils.py:19:1: E302 expected 2 blank lines, found 1
openfoodfacts/utils.py:19:94: E231 missing whitespace after ','
openfoodfacts/utils.py:23:1: W191 indentation contains tabs
openfoodfacts/utils.py:23:1: E101 indentation contains mixed spaces and tabs
openfoodfacts/utils.py:23:2: E113 unexpected indentation
openfoodfacts/utils.py:24:95: E225 missing whitespace around operator
The command "pep8 openfoodfacts/*.py" exited with 1.
Done. Your build exited with 1. | openfoodfacts/openfoodfacts-python | diff --git a/tests/facets_test.py b/tests/facets_test.py
index 51f1fe9..b54228a 100644
--- a/tests/facets_test.py
+++ b/tests/facets_test.py
@@ -1,7 +1,5 @@
import unittest
-import os
import openfoodfacts
-import requests
import requests_mock
@@ -9,14 +7,14 @@ class TestFacets(unittest.TestCase):
def test_get_traces(self):
with requests_mock.mock() as mock:
- mock.get('http://world.openfoodfacts.org/traces.json',
+ mock.get('https://world.openfoodfacts.org/traces.json',
text='{"tags":["egg"]}')
res = openfoodfacts.facets.get_traces()
self.assertEquals(res, ["egg"])
def test_get_additives(self):
with requests_mock.mock() as mock:
- mock.get('http://world.openfoodfacts.org/additives.json',
+ mock.get('https://world.openfoodfacts.org/additives.json',
text='{"tags":["additive"]}')
res = openfoodfacts.facets.get_additives()
self.assertEquals(res, ["additive"])
diff --git a/tests/products_test.py b/tests/products_test.py
index c58a6d2..a2c3ab9 100644
--- a/tests/products_test.py
+++ b/tests/products_test.py
@@ -10,21 +10,28 @@ class TestProducts(unittest.TestCase):
def test_get_product(self):
with requests_mock.mock() as mock:
mock.get(
- 'http://world.openfoodfacts.org/api/v0/product/1223435.json',
+ 'https://world.openfoodfacts.org/api/v0/product/1223435.json',
text='{"name":"product_test"}')
res = openfoodfacts.get_product('1223435')
self.assertEquals(res, {'name': 'product_test'})
def test_get_by_trace(self):
with requests_mock.mock() as mock:
- mock.get('http://world.openfoodfacts.org/trace/egg.json',
+ mock.get('https://world.openfoodfacts.org/trace/egg/1.json',
text='{"products":["omelet"]}')
res = openfoodfacts.products.get_by_trace('egg')
self.assertEquals(res, ["omelet"])
+ def test_get_by_trace_pagination(self):
+ with requests_mock.mock() as mock:
+ mock.get('https://world.openfoodfacts.org/trace/egg/2.json',
+ text='{"products":["omelet"]}')
+ res = openfoodfacts.products.get_by_trace('egg', 2)
+ self.assertEquals(res, ["omelet"])
+
def test_get_by_country(self):
with requests_mock.mock() as mock:
- mock.get('http://world.openfoodfacts.org/country/france.json',
+ mock.get('https://world.openfoodfacts.org/country/france/1.json',
text='{"products":["omelet"]}')
res = openfoodfacts.products.get_by_country('france')
self.assertEquals(res, ["omelet"])
@@ -35,7 +42,8 @@ class TestProducts(unittest.TestCase):
with requests_mock.mock() as mock:
mock.get(
- 'http://world.openfoodfacts.org/country/france/trace/egg.json',
+ 'https://world.openfoodfacts.org/country/'
+ 'france/trace/egg/1.json',
text='{"products":["omelet"]}')
res = openfoodfacts.products.get_by_facets(
{'trace': 'egg', 'country': 'france'})
@@ -44,14 +52,14 @@ class TestProducts(unittest.TestCase):
def test_search(self):
with requests_mock.mock() as mock:
mock.get(
- 'http://world.openfoodfacts.org/cgi/search.pl?' +
+ 'https://world.openfoodfacts.org/cgi/search.pl?' +
'search_terms=kinder bueno&json=1&page=' +
'1&page_size=20&sort_by=unique_scans',
text='{"products":["kinder bueno"], "count": 1}')
res = openfoodfacts.products.search('kinder bueno')
self.assertEquals(res["products"], ["kinder bueno"])
mock.get(
- 'http://world.openfoodfacts.org/cgi/search.pl?' +
+ 'https://world.openfoodfacts.org/cgi/search.pl?' +
'search_terms=banania&json=1&page=' +
'2&page_size=10&sort_by=unique_scans',
text='{"products":["banania", "banania big"], "count": 2}')
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"requests_mock"
],
"pre_install": [],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
-e git+https://github.com/openfoodfacts/openfoodfacts-python.git@0858f6688ca51c0064aa1891b342a441680bf5c9#egg=openfoodfacts
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.11.1
requests-mock==1.12.1
tomli==2.2.1
urllib3==2.3.0
| name: openfoodfacts-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.11.1
- requests-mock==1.12.1
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/openfoodfacts-python
| [
"tests/products_test.py::TestProducts::test_get_by_country",
"tests/products_test.py::TestProducts::test_get_by_country_and_trace",
"tests/products_test.py::TestProducts::test_get_by_trace",
"tests/products_test.py::TestProducts::test_get_by_trace_pagination"
]
| []
| [
"tests/facets_test.py::TestFacets::test_get_additives",
"tests/facets_test.py::TestFacets::test_get_traces",
"tests/products_test.py::TestProducts::test_get_product",
"tests/products_test.py::TestProducts::test_search"
]
| []
| MIT License | 1,074 | [
"openfoodfacts/__init__.py",
"README.md",
"openfoodfacts/products.py"
]
| [
"openfoodfacts/__init__.py",
"README.md",
"openfoodfacts/products.py"
]
|
|
dpkp__kafka-python-1025 | 218a9014b749e52a2b8d40da6e3443c8132b8fa1 | 2017-03-09 20:24:44 | 618c5051493693c1305aa9f08e8a0583d5fcf0e3 | diff --git a/kafka/coordinator/base.py b/kafka/coordinator/base.py
index e811e88..ab259dd 100644
--- a/kafka/coordinator/base.py
+++ b/kafka/coordinator/base.py
@@ -245,13 +245,12 @@ class BaseCoordinator(object):
# ensure that there are no pending requests to the coordinator.
# This is important in particular to avoid resending a pending
# JoinGroup request.
- if self._client.in_flight_request_count(self.coordinator_id):
- while not self.coordinator_unknown():
- self._client.poll(delayed_tasks=False)
- if not self._client.in_flight_request_count(self.coordinator_id):
- break
- else:
- continue
+ while not self.coordinator_unknown():
+ if not self._client.in_flight_request_count(self.coordinator_id):
+ break
+ self._client.poll(delayed_tasks=False)
+ else:
+ continue
future = self._send_join_group_request()
self._client.poll(future=future)
@@ -287,6 +286,10 @@ class BaseCoordinator(object):
e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id)
return Future().failure(e)
+ elif not self._client.ready(self.coordinator_id, metadata_priority=False):
+ e = Errors.NodeNotReadyError(self.coordinator_id)
+ return Future().failure(e)
+
# send a join group request to the coordinator
log.info("(Re-)joining group %s", self.group_id)
request = JoinGroupRequest[0](
@@ -417,6 +420,13 @@ class BaseCoordinator(object):
if self.coordinator_unknown():
e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id)
return Future().failure(e)
+
+ # We assume that coordinator is ready if we're sending SyncGroup
+ # as it typically follows a successful JoinGroup
+ # Also note that if client.ready() enforces a metadata priority policy,
+ # we can get into an infinite loop if the leader assignment process
+ # itself requests a metadata update
+
future = Future()
_f = self._client.send(self.coordinator_id, request)
_f.add_callback(self._handle_sync_group_response, future, time.time())
@@ -468,6 +478,10 @@ class BaseCoordinator(object):
if node_id is None:
return Future().failure(Errors.NoBrokersAvailable())
+ elif not self._client.ready(node_id, metadata_priority=False):
+ e = Errors.NodeNotReadyError(node_id)
+ return Future().failure(e)
+
log.debug("Sending group coordinator request for group %s to broker %s",
self.group_id, node_id)
request = GroupCoordinatorRequest[0](self.group_id)
@@ -554,6 +568,14 @@ class BaseCoordinator(object):
def _send_heartbeat_request(self):
"""Send a heartbeat request"""
+ if self.coordinator_unknown():
+ e = Errors.GroupCoordinatorNotAvailableError(self.coordinator_id)
+ return Future().failure(e)
+
+ elif not self._client.ready(self.coordinator_id, metadata_priority=False):
+ e = Errors.NodeNotReadyError(self.coordinator_id)
+ return Future().failure(e)
+
request = HeartbeatRequest[0](self.group_id, self.generation, self.member_id)
log.debug("Heartbeat: %s[%s] %s", request.group, request.generation_id, request.member_id) # pylint: disable-msg=no-member
future = Future()
diff --git a/kafka/protocol/struct.py b/kafka/protocol/struct.py
index 4c1afcb..3288172 100644
--- a/kafka/protocol/struct.py
+++ b/kafka/protocol/struct.py
@@ -18,7 +18,12 @@ class Struct(AbstractType):
elif len(args) > 0:
raise ValueError('Args must be empty or mirror schema')
else:
- self.__dict__.update(kwargs)
+ for name in self.SCHEMA.names:
+ self.__dict__[name] = kwargs.pop(name, None)
+ if kwargs:
+ raise ValueError('Keyword(s) not in schema %s: %s'
+ % (list(self.SCHEMA.names),
+ ', '.join(kwargs.keys())))
# overloading encode() to support both class and instance
# Without WeakMethod() this creates circular ref, which
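With this change every name in the schema is guaranteed to exist on the instance (missing fields default to None), and unknown keywords fail fast instead of surfacing later as a `KeyError` inside `__repr__`. A standalone sketch of the pattern (a simplified stand-in, not the real kafka-python classes):
```python
class Struct(object):
    SCHEMA_NAMES = ('error_code', 'generation_id', 'member_id')

    def __init__(self, **kwargs):
        # Every schema field gets a value; missing ones default to None.
        for name in self.SCHEMA_NAMES:
            setattr(self, name, kwargs.pop(name, None))
        if kwargs:
            # Anything left over was not part of the schema.
            raise ValueError('Keyword(s) not in schema %s: %s'
                             % (list(self.SCHEMA_NAMES), ', '.join(kwargs)))

    def __repr__(self):
        # Safe now: every schema name is present on the instance.
        return 'Struct(%s)' % ', '.join(
            '%s=%r' % (n, getattr(self, n)) for n in self.SCHEMA_NAMES)

print(Struct(error_code=26))   # Struct(error_code=26, generation_id=None, member_id=None)
try:
    Struct(bogus=1)
except ValueError as err:
    print(err)                 # Keyword(s) not in schema [...]: bogus
```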
| KeyError: 'error_code' at kafka/protocol/struct.py __repr__
kafka-python==1.3.1, kafka 0.10.1.0
I've set the consumer's session_timeout_ms larger than group.max.session.timeout.ms on the broker and used group coordination. Here is my consumer setup:
```python
SECOND = 1000
consumer = kafka.KafkaConsumer(
topic,
bootstrap_servers = bootstrap_servers,
client_id = client_id,
group_id = group_id,
value_deserializer = value_deserializer,
auto_offset_reset = 'earliest',
enable_auto_commit = False,
consumer_timeout_ms = 1000 * 1,
api_version = (0, 10),
session_timeout_ms = SECOND * 60 * 20,
heartbeat_interval_ms = SECOND,
)
```
I use celery and I got the following exception:
```
[2016-12-12 07:50:36,281: ERROR/Worker-2] Attempt to join group some_name failed due to fatal error: [Error 26] InvalidSessionTimeoutError: JoinGroupResponse_v0(error_code=26, generation_id=0, group_protocol=u'', leader_id=u'', member_id=u'', members=[])
[2016-12-12 07:50:36,464: INFO/MainProcess] Task task_name[task_id] succeeded in 0.72630248405s: None
Traceback (most recent call last):
File "/usr/lib/python2.7/logging/__init__.py", line 851, in emit
msg = self.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 724, in format
return fmt.format(record)
File "/home/qwe/venv/local/lib/python2.7/site-packages/celery/utils/log.py", line 142, in format
msg = logging.Formatter.format(self, record)
File "/usr/lib/python2.7/logging/__init__.py", line 464, in format
record.message = record.getMessage()
File "/usr/lib/python2.7/logging/__init__.py", line 328, in getMessage
msg = msg % self.args
File "/home/qwe/venv/local/lib/python2.7/site-packages/kafka/protocol/struct.py", line 46, in __repr__
key_vals.append('%s=%s' % (name, field.repr(self.__dict__[name])))
KeyError: 'error_code'
``` | dpkp/kafka-python | diff --git a/test/test_protocol.py b/test/test_protocol.py
index 1c9f0f9..aa3dd17 100644
--- a/test/test_protocol.py
+++ b/test/test_protocol.py
@@ -7,8 +7,9 @@ import six
from kafka.protocol.api import RequestHeader
from kafka.protocol.commit import GroupCoordinatorRequest
-from kafka.protocol.fetch import FetchResponse
+from kafka.protocol.fetch import FetchRequest, FetchResponse
from kafka.protocol.message import Message, MessageSet, PartialMessage
+from kafka.protocol.metadata import MetadataRequest
from kafka.protocol.types import Int16, Int32, Int64, String
@@ -244,3 +245,16 @@ def test_decode_fetch_response_partial():
m1 = partitions[0][3]
assert len(m1) == 2
assert m1[1] == (None, None, PartialMessage())
+
+
+def test_struct_unrecognized_kwargs():
+ try:
+ mr = MetadataRequest[0](topicz='foo')
+ assert False, 'Structs should not allow unrecognized kwargs'
+ except ValueError:
+ pass
+
+
+def test_struct_missing_kwargs():
+ fr = FetchRequest[0](max_wait_time=100)
+ assert fr.min_bytes is None
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-catchlog pytest-pylint pytest-sugar pytest-mock mock python-snappy lz4tools xxhash",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.6",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
astroid==2.11.7
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
cramjam==2.5.0
dill==0.3.4
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isort==5.10.1
Jinja2==3.0.3
-e git+https://github.com/dpkp/kafka-python.git@218a9014b749e52a2b8d40da6e3443c8132b8fa1#egg=kafka_python
lazy-object-proxy==1.7.1
lz4tools==1.3.1.2
MarkupSafe==2.0.1
mccabe==0.7.0
mock==5.2.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
pockets==0.9.1
py==1.11.0
Pygments==2.14.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-pylint==0.18.0
pytest-sugar==0.9.6
python-snappy==0.7.3
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-napoleon==0.7
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
urllib3==1.26.20
wrapt==1.16.0
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- astroid==2.11.7
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- coverage==6.2
- cramjam==2.5.0
- dill==0.3.4
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isort==5.10.1
- jinja2==3.0.3
- lazy-object-proxy==1.7.1
- lz4tools==1.3.1.2
- markupsafe==2.0.1
- mccabe==0.7.0
- mock==5.2.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- pockets==0.9.1
- py==1.11.0
- pygments==2.14.0
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-pylint==0.18.0
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-napoleon==0.7
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- urllib3==1.26.20
- wrapt==1.16.0
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_protocol.py::test_struct_unrecognized_kwargs",
"test/test_protocol.py::test_struct_missing_kwargs"
]
| []
| [
"test/test_protocol.py::test_create_message",
"test/test_protocol.py::test_encode_message_v0",
"test/test_protocol.py::test_encode_message_v1",
"test/test_protocol.py::test_decode_message",
"test/test_protocol.py::test_encode_message_set",
"test/test_protocol.py::test_decode_message_set",
"test/test_protocol.py::test_encode_message_header",
"test/test_protocol.py::test_decode_message_set_partial",
"test/test_protocol.py::test_decode_fetch_response_partial"
]
| []
| Apache License 2.0 | 1,075 | [
"kafka/protocol/struct.py",
"kafka/coordinator/base.py"
]
| [
"kafka/protocol/struct.py",
"kafka/coordinator/base.py"
]
|
|
jmespath__jmespath.py-126 | de035093b7e52c6a6ea4411f41d2aec10d499dcb | 2017-03-10 01:54:38 | 71f44854a35c5abcdb8fbd84e25d185d0ca53f92 | diff --git a/jmespath/visitor.py b/jmespath/visitor.py
index 75af885..6c7f0c4 100644
--- a/jmespath/visitor.py
+++ b/jmespath/visitor.py
@@ -1,6 +1,7 @@
import operator
from jmespath import functions
+from jmespath.compat import string_type
def _equals(x, y):
@@ -33,6 +34,14 @@ def _is_special_integer_case(x, y):
return x is True or x is False
+def _is_comparable(x):
+ # The spec doesn't officially support string types yet,
+ # but enough people are relying on this behavior that
+ # it's been added back. This should eventually become
+ # part of the official spec.
+ return _is_actual_number(x) or isinstance(x, string_type)
+
+
def _is_actual_number(x):
# We need to handle python's quirkiness with booleans,
# specifically:
@@ -142,8 +151,8 @@ class TreeInterpreter(Visitor):
left = self.visit(node['children'][0], value)
right = self.visit(node['children'][1], value)
num_types = (int, float)
- if not (_is_actual_number(left) and
- _is_actual_number(right)):
+ if not (_is_comparable(left) and
+ _is_comparable(right)):
return None
return comparator_func(left, right)
| 0.9.1 date comparisons not working
From https://gitter.im/jmespath/chat:
```
aws ec2 describe-snapshots --owner-id self --query 'Snapshots[?StartTime>='2017--02-01']'
The above command returns an empty set ( '[]' ) when run with v0.9.1, and a list of appropriate snapshots when run with v0.9.0.
```
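With the `_is_comparable` change in the patch above, ordering comparisons on strings (and therefore on ISO-formatted date strings) return a boolean again instead of None, which is what silently emptied these filter results. A quick check mirroring the new regression test:
```python
import jmespath

# 0.9.1 returned None for non-numeric comparisons, so filters dropped every element;
# with the fix, lexicographic string ordering works again for date-like values.
assert jmespath.search('a < b', {'a': '2016-12-31', 'b': '2017-02-01'})
assert jmespath.search("Snapshots[?StartTime>='2017-02-01'].StartTime",
                       {'Snapshots': [{'StartTime': '2017-03-01'}]}) == ['2017-03-01']
```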
I have the same problem fetching AMIs by date. It appears date comparisons (except ==) are broken in 0.9.1 (and work fine in 0.9.0). | jmespath/jmespath.py | diff --git a/tests/test_search.py b/tests/test_search.py
index 71ab3dc..c26a2fa 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -36,3 +36,10 @@ class TestSearchOptions(unittest.TestCase):
jmespath.search('my_subtract(`10`, `3`)', {}, options=options),
7
)
+
+
+class TestPythonSpecificCases(unittest.TestCase):
+ def test_can_compare_strings(self):
+ # This is python specific behavior that's not in the official spec
+ # yet, but this was regression from 0.9.0 so it's been added back.
+ self.assertTrue(jmespath.search('a < b', {'a': '2016', 'b': '2017'}))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==3.7.1
distlib==0.3.9
filelock==3.4.1
hypothesis==3.1.0
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
-e git+https://github.com/jmespath/jmespath.py.git@de035093b7e52c6a6ea4411f41d2aec10d499dcb#egg=jmespath
nose==1.2.1
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
tox==1.4.2
typing_extensions==4.1.1
virtualenv==20.17.1
zipp==3.6.0
| name: jmespath.py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==3.7.1
- distlib==0.3.9
- filelock==3.4.1
- hypothesis==3.1.0
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- nose==1.2.1
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- tox==1.4.2
- typing-extensions==4.1.1
- virtualenv==20.17.1
- wheel==0.24.0
- zipp==3.6.0
prefix: /opt/conda/envs/jmespath.py
| [
"tests/test_search.py::TestPythonSpecificCases::test_can_compare_strings"
]
| []
| [
"tests/test_search.py::TestSearchOptions::test_can_provide_custom_functions",
"tests/test_search.py::TestSearchOptions::test_can_provide_dict_cls"
]
| []
| MIT License | 1,076 | [
"jmespath/visitor.py"
]
| [
"jmespath/visitor.py"
]
|
|
chriskuehl__lazy-build-16 | c53270b41e1d3716c301e65a283d99f86aa55bb9 | 2017-03-10 19:38:00 | c53270b41e1d3716c301e65a283d99f86aa55bb9 | diff --git a/lazy_build/cache.py b/lazy_build/cache.py
index 27124d9..f1ec74a 100644
--- a/lazy_build/cache.py
+++ b/lazy_build/cache.py
@@ -20,10 +20,15 @@ class S3Backend(collections.namedtuple('S3Backend', (
def key_for_ctx(self, ctx):
return self.path.rstrip('/') + '/' + ctx.hash
+ def artifact_paths(self, ctx):
+ key = self.key_for_ctx(ctx)
+ return key + '.tar.gz', key + '.json'
+
def has_artifact(self, ctx):
+ tarball, json = self.artifact_paths(ctx)
# what a ridiculous dance we have to do here...
try:
- self.s3.Object(self.bucket, self.key_for_ctx(ctx)).load()
+ self.s3.Object(self.bucket, tarball).load()
except botocore.exceptions.ClientError as ex:
if ex.response['Error']['Code'] == '404':
return False
@@ -33,18 +38,20 @@ class S3Backend(collections.namedtuple('S3Backend', (
return True
def get_artifact(self, ctx):
+ tarball, json = self.artifact_paths(ctx)
fd, path = tempfile.mkstemp()
os.close(fd)
self.s3.Bucket(self.bucket).download_file(
- self.key_for_ctx(ctx),
+ tarball,
path,
)
return path
def store_artifact(self, ctx, path):
+ tarball, json = self.artifact_paths(ctx)
self.s3.Bucket(self.bucket).upload_file(
path,
- self.key_for_ctx(ctx),
+ tarball,
)
def invalidate_artifact(self, ctx):
diff --git a/lazy_build/context.py b/lazy_build/context.py
index eadebb9..d7e0009 100644
--- a/lazy_build/context.py
+++ b/lazy_build/context.py
@@ -96,7 +96,7 @@ def build_context(conf, command):
def package_artifact(conf):
fd, tmp = tempfile.mkstemp()
os.close(fd)
- with tarfile.TarFile(tmp, mode='w') as tf:
+ with tarfile.open(tmp, mode='w:gz') as tf:
for output_path in conf.output:
if os.path.isdir(output_path):
for path, _, filenames in os.walk(output_path):
@@ -115,5 +115,5 @@ def extract_artifact(conf, artifact):
else:
os.remove(output_path)
- with tarfile.TarFile(artifact) as tf:
+ with tarfile.open(artifact, 'r:gz') as tf:
tf.extractall()
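For reference, the whole change on the stdlib side is the compression mode passed to `tarfile.open`; a minimal standalone sketch (file and directory names are illustrative):
```python
import tarfile

# Writing: 'w:gz' produces a gzip-compressed tarball, which is what
# package_artifact now uploads instead of an uncompressed archive.
with tarfile.open('artifact.tar.gz', mode='w:gz') as tf:
    tf.add('node_modules', arcname='node_modules')

# Reading: the matching 'r:gz' mode (or 'r:*' to autodetect) is needed to unpack it.
with tarfile.open('artifact.tar.gz', mode='r:gz') as tf:
    tf.extractall()
```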
| gzip before upload
In some common cases (e.g. node_modules), this reduces ~300MB artifacts to ~100MB artifacts. Probably worth the CPU time. | chriskuehl/lazy-build | diff --git a/tests/context_test.py b/tests/context_test.py
index 1e58218..b3271f5 100644
--- a/tests/context_test.py
+++ b/tests/context_test.py
@@ -77,7 +77,7 @@ def test_package_artifact(tmpdir):
after_download=None,
))
try:
- with tarfile.TarFile(tmp) as tf:
+ with tarfile.open(tmp, 'r:gz') as tf:
members = {member.name for member in tf.getmembers()}
finally:
os.remove(tmp)
@@ -94,7 +94,7 @@ def test_extract_artifact(tmpdir):
tmpdir.join('a/b/sup').ensure()
tar = tmpdir.join('my.tar').strpath
- with tarfile.TarFile(tar, 'w') as tf:
+ with tarfile.open(tar, 'w:gz') as tf:
for path in ('my.txt', 'hello/there.txt', 'a/b/c/d.txt'):
ti = tarfile.TarInfo(path)
ti.size = 6
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"covdefaults",
"coverage",
"twine"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | backports.tarfile==1.2.0
boto3==1.37.23
botocore==1.37.23
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
covdefaults==2.3.0
coverage==7.8.0
cryptography==44.0.2
docutils==0.21.2
exceptiongroup==1.2.2
id==1.5.0
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
jaraco.classes==3.4.0
jaraco.context==6.0.1
jaraco.functools==4.1.0
jeepney==0.9.0
jmespath==1.0.1
keyring==25.6.0
-e git+https://github.com/chriskuehl/lazy-build.git@c53270b41e1d3716c301e65a283d99f86aa55bb9#egg=lazy_build
markdown-it-py==3.0.0
mdurl==0.1.2
more-itertools==10.6.0
nh3==0.2.21
packaging==24.2
pluggy==1.5.0
pycparser==2.22
Pygments==2.19.1
pytest==8.3.5
python-dateutil==2.9.0.post0
readme_renderer==44.0
requests==2.32.3
requests-toolbelt==1.0.0
rfc3986==2.0.0
rich==14.0.0
s3transfer==0.11.4
SecretStorage==3.3.3
six==1.17.0
tomli==2.2.1
twine==6.1.0
typing_extensions==4.13.0
urllib3==1.26.20
zipp==3.21.0
| name: lazy-build
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-tarfile==1.2.0
- boto3==1.37.23
- botocore==1.37.23
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- covdefaults==2.3.0
- coverage==7.8.0
- cryptography==44.0.2
- docutils==0.21.2
- exceptiongroup==1.2.2
- id==1.5.0
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jaraco-classes==3.4.0
- jaraco-context==6.0.1
- jaraco-functools==4.1.0
- jeepney==0.9.0
- jmespath==1.0.1
- keyring==25.6.0
- markdown-it-py==3.0.0
- mdurl==0.1.2
- more-itertools==10.6.0
- nh3==0.2.21
- packaging==24.2
- pluggy==1.5.0
- pycparser==2.22
- pygments==2.19.1
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- readme-renderer==44.0
- requests==2.32.3
- requests-toolbelt==1.0.0
- rfc3986==2.0.0
- rich==14.0.0
- s3transfer==0.11.4
- secretstorage==3.3.3
- six==1.17.0
- tomli==2.2.1
- twine==6.1.0
- typing-extensions==4.13.0
- urllib3==1.26.20
- zipp==3.21.0
prefix: /opt/conda/envs/lazy-build
| [
"tests/context_test.py::test_package_artifact",
"tests/context_test.py::test_extract_artifact"
]
| []
| [
"tests/context_test.py::test_should_ignore_true[patterns0-venv]",
"tests/context_test.py::test_should_ignore_true[patterns1-venv]",
"tests/context_test.py::test_should_ignore_true[patterns2-this/is/some/venv]",
"tests/context_test.py::test_should_ignore_true[patterns3-this/is/some/venv/with/a/file]",
"tests/context_test.py::test_should_ignore_true[patterns4-this/is/some/venv/with/a/file]",
"tests/context_test.py::test_should_ignore_true[patterns5-something.swp]",
"tests/context_test.py::test_should_ignore_true[patterns6-hello/there/something.swp]",
"tests/context_test.py::test_should_ignore_true[patterns7-my/.thing.txt.swo]",
"tests/context_test.py::test_should_ignore_false[patterns0-this/is/some/venv]",
"tests/context_test.py::test_should_ignore_false[patterns1-this/is/some/venv]",
"tests/context_test.py::test_should_ignore_false[patterns2-venv2]",
"tests/context_test.py::test_build_context_simple"
]
| []
| MIT License | 1,077 | [
"lazy_build/context.py",
"lazy_build/cache.py"
]
| [
"lazy_build/context.py",
"lazy_build/cache.py"
]
|
|
Azure__azure-cli-2467 | d79710730933432d2c8ccd722d31fbbf75845ce0 | 2017-03-11 06:05:00 | eb12ac454cbe1ddb59c86cdf2045e1912660e750 | codecov-io: # [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2467?src=pr&el=h1) Report
> Merging [#2467](https://codecov.io/gh/Azure/azure-cli/pull/2467?src=pr&el=desc) into [master](https://codecov.io/gh/Azure/azure-cli/commit/d79710730933432d2c8ccd722d31fbbf75845ce0?src=pr&el=desc) will **increase** coverage by `0.01%`.
> The diff coverage is `n/a`.
```diff
@@ Coverage Diff @@
## master #2467 +/- ##
==========================================
+ Coverage 72.44% 72.45% +0.01%
==========================================
Files 362 362
Lines 19547 19547
Branches 2865 2865
==========================================
+ Hits 14160 14162 +2
+ Misses 4475 4472 -3
- Partials 912 913 +1
```
| [Impacted Files](https://codecov.io/gh/Azure/azure-cli/pull/2467?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [...zure-cli-vm/azure/cli/command_modules/vm/custom.py](https://codecov.io/gh/Azure/azure-cli/compare/d79710730933432d2c8ccd722d31fbbf75845ce0...47b28ad2c4278b0ec927820cf60d22de1d63b596?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktdm0vYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy92bS9jdXN0b20ucHk=) | `74.5% <ø> (+0.22%)` | :white_check_mark: |
| [...dback/azure/cli/command_modules/feedback/custom.py](https://codecov.io/gh/Azure/azure-cli/compare/d79710730933432d2c8ccd722d31fbbf75845ce0...47b28ad2c4278b0ec927820cf60d22de1d63b596?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktZmVlZGJhY2svYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy9mZWVkYmFjay9jdXN0b20ucHk=) | `34.69% <0%> (ø)` | :white_check_mark: |
| [src/azure-cli-core/azure/cli/core/_util.py](https://codecov.io/gh/Azure/azure-cli/compare/d79710730933432d2c8ccd722d31fbbf75845ce0...47b28ad2c4278b0ec927820cf60d22de1d63b596?src=pr&el=tree#diff-c3JjL2F6dXJlLWNsaS1jb3JlL2F6dXJlL2NsaS9jb3JlL191dGlsLnB5) | `66.66% <0%> (ø)` | :white_check_mark: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2467?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2467?src=pr&el=footer). Last update [d797107...47b28ad](https://codecov.io/gh/Azure/azure-cli/compare/d79710730933432d2c8ccd722d31fbbf75845ce0...47b28ad2c4278b0ec927820cf60d22de1d63b596?src=pr&el=footer&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). | diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
index 3df308105..bcff81d87 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
@@ -160,7 +160,7 @@ helps['feature'] = """
helps['group'] = """
type: group
- short-summary: Manage resource groups.
+ short-summary: Manage resource groups and template deployments.
"""
helps['group exists'] = """
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
index da2c20927..22e31d7a2 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
@@ -1291,7 +1291,9 @@ def get_vmss_instance_view(resource_group_name, vm_scale_set_name, instance_id=N
if instance_id:
if instance_id == '*':
return client.virtual_machine_scale_set_vms.list(resource_group_name,
- vm_scale_set_name)
+ vm_scale_set_name,
+ select='instanceView',
+ expand='instanceView')
else:
return client.virtual_machine_scale_set_vms.get_instance_view(resource_group_name,
vm_scale_set_name,
| az vmss get-instance-view returns model view when specifying all VMs
If you run the following to get the instance view for all VMs in a scale set:
```
az vmss get-instance-view -g rgname -n vmssname --instance-id \*
```
CLI does:
```
requests.packages.urllib3.connectionpool : https://management.azure.com:443 "GET /subscriptions/<subid>/resourceGroups/rgname/providers/Microsoft.Compute/virtualMachineScaleSets/vmssname/virtualMachines?api-version=2016-04-30-preview”
```
which returns the model view for all VMs.
It should be doing:
```
requests.packages.urllib3.connectionpool : https://management.azure.com:443 "GET /subscriptions/<subid>/resourceGroups/rgname/providers/Microsoft.Compute/virtualMachineScaleSets/vmssname/virtualMachines?$expand=instanceView&$select=instanceView&api-version=2016-04-30-preview”
```
The instance-view command works fine if you just query one VM.
| Azure/azure-cli | diff --git a/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py b/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py
index 54809d1d5..b6ff5ac31 100644
--- a/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py
+++ b/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py
@@ -14,7 +14,7 @@ from azure.cli.command_modules.vm.custom import (_get_access_extension_upgrade_i
_LINUX_ACCESS_EXT,
_WINDOWS_ACCESS_EXT)
from azure.cli.command_modules.vm.custom import \
- (attach_unmanaged_data_disk, detach_data_disk)
+ (attach_unmanaged_data_disk, detach_data_disk, get_vmss_instance_view)
from azure.cli.command_modules.vm.disk_encryption import enable, disable
from azure.mgmt.compute.models import (NetworkProfile, StorageProfile, DataDisk, OSDisk,
OperatingSystemTypes, InstanceViewStatus,
@@ -196,6 +196,17 @@ class Test_Vm_Custom(unittest.TestCase):
mock_vm_set.assert_called_once_with(vm)
self.assertEqual(len(vm.storage_profile.data_disks), 0)
+ @mock.patch('azure.cli.command_modules.vm.custom._compute_client_factory')
+ def test_show_vmss_instance_view(self, factory_mock):
+ vm_client = mock.MagicMock()
+ factory_mock.return_value = vm_client
+
+ # execute
+ get_vmss_instance_view('rg1', 'vmss1', '*')
+ # assert
+ vm_client.virtual_machine_scale_set_vms.list.assert_called_once_with('rg1', 'vmss1', expand='instanceView',
+ select='instanceView')
+
# pylint: disable=line-too-long
@mock.patch('azure.cli.command_modules.vm.disk_encryption._compute_client_factory', autospec=True)
@mock.patch('azure.cli.command_modules.vm.disk_encryption._get_keyvault_key_url', autospec=True)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==2.0.0
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_datalake&subdirectory=src/command_modules/azure-cli-datalake
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_documentdb&subdirectory=src/command_modules/azure-cli-documentdb
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@d79710730933432d2c8ccd722d31fbbf75845ce0#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.4
azure-core==1.24.2
azure-datalake-store==0.0.5
azure-graphrbac==0.30.0rc6
azure-keyvault==0.1.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==3.0.0
azure-mgmt-compute==0.33.1rc1
azure-mgmt-containerregistry==0.1.1
azure-mgmt-datalake-analytics==0.1.3
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.3
azure-mgmt-dns==1.0.0
azure-mgmt-documentdb==0.1.0
azure-mgmt-iothub==0.2.1
azure-mgmt-keyvault==0.30.0
azure-mgmt-network==0.30.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.2
azure-mgmt-sql==0.3.1
azure-mgmt-storage==0.31.0
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.31.0
azure-nspkg==3.0.2
azure-storage==0.33.0
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.1.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
Whoosh==2.7.4
wrapt==1.16.0
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==2.0.0
- azure-common==1.1.4
- azure-core==1.24.2
- azure-datalake-store==0.0.5
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.1.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==3.0.0
- azure-mgmt-compute==0.33.1rc1
- azure-mgmt-containerregistry==0.1.1
- azure-mgmt-datalake-analytics==0.1.3
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.3
- azure-mgmt-dns==1.0.0
- azure-mgmt-documentdb==0.1.0
- azure-mgmt-iothub==0.2.1
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-network==0.30.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.2
- azure-mgmt-sql==0.3.1
- azure-mgmt-storage==0.31.0
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.31.0
- azure-nspkg==3.0.2
- azure-storage==0.33.0
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.1.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- whoosh==2.7.4
- wrapt==1.16.0
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_show_vmss_instance_view"
]
| []
| [
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_attach_existing_datadisk_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_attach_new_datadisk_custom_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_attach_new_datadisk_default_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_deattach_disk_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_disable_boot_diagnostics_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_disable_encryption_error_cases_handling",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_enable_boot_diagnostics_on_vm_never_enabled",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_enable_boot_diagnostics_skip_when_enabled_already",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_enable_encryption_error_cases_handling",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_get_access_extension_upgrade_info",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_merge_secrets"
]
| []
| MIT License | 1,078 | [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py"
]
| [
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py"
]
|
postlund__pyatv-60 | 8937e869c3ce7d86edad5d2c88d2139fe57c3127 | 2017-03-11 10:24:13 | e61c8216da8b7cb8ec67dd194129037336d11fca | coveralls:
[](https://coveralls.io/builds/10544411)
Coverage remained the same at 96.547% when pulling **a138f1ea040fec310dbb8629f57194a7ed97ce7b on media_kinds** into **8937e869c3ce7d86edad5d2c88d2139fe57c3127 on master**.
| diff --git a/pyatv/convert.py b/pyatv/convert.py
index 98c7e77e..0843f19b 100644
--- a/pyatv/convert.py
+++ b/pyatv/convert.py
@@ -7,11 +7,11 @@ def media_kind(kind):
"""Convert iTunes media kind to API representation."""
if kind in [1]:
return const.MEDIA_TYPE_UNKNOWN
- elif kind in [3, 7, 11, 12, 13, 18]:
+ elif kind in [3, 7, 11, 12, 13, 18, 32]:
return const.MEDIA_TYPE_VIDEO
- elif kind in [2, 4, 10, 14, 17, 21]:
+ elif kind in [2, 4, 10, 14, 17, 21, 36]:
return const.MEDIA_TYPE_MUSIC
- elif kind in [8]:
+ elif kind in [8, 64]:
return const.MEDIA_TYPE_TV
raise exceptions.UnknownMediaKind('Unknown media kind: ' + str(kind))
| Unknown media kind TV show
The media kind "TV Show" is not recognized correctly.
pyatv.exceptions.UnknownMediaKind: Unknown media kind: 64 | postlund/pyatv | diff --git a/tests/test_convert.py b/tests/test_convert.py
index 99d8bdd3..77dfab2e 100644
--- a/tests/test_convert.py
+++ b/tests/test_convert.py
@@ -6,6 +6,8 @@ from pyatv import (const, convert, exceptions)
# These are extracted from iTunes, see for instance:
# http://www.blooming.no/wp-content/uploads/2013/03/ITLibMediaItem.h
+# and also this:
+# https://github.com/melloware/dacp-net/blob/master/Melloware.DACP/DACPResponse.cs
# Key: cmst.cmmk
MEDIA_KIND_UNKNOWN = 1
MEDIA_KIND_SONG = 2
@@ -28,7 +30,9 @@ MEDIA_KIND_ITUNESU = 18
MEDIA_KIND_BOOK = 19
MEDIA_KIND_PDFBOOK = 20
MEDIA_KIND_ALERTTONE = 21
-
+MEDIA_KIND_MUSICVIDEO2 = 32
+MEDIA_KIND_PODCAST2 = 36
+MEDIA_KIND_TVSHOW2 = 64
# Found on various places on the Internet as well as by testing
# Key: cmst.caps
@@ -52,6 +56,8 @@ class ConvertTest(unittest.TestCase):
convert.media_kind(MEDIA_KIND_MOVIE))
self.assertEqual(const.MEDIA_TYPE_VIDEO,
convert.media_kind(MEDIA_KIND_MUSICVIDEO))
+ self.assertEqual(const.MEDIA_TYPE_VIDEO,
+ convert.media_kind(MEDIA_KIND_MUSICVIDEO2))
self.assertEqual(const.MEDIA_TYPE_VIDEO,
convert.media_kind(MEDIA_KIND_VIDEOPASS))
self.assertEqual(const.MEDIA_TYPE_VIDEO,
@@ -66,6 +72,8 @@ class ConvertTest(unittest.TestCase):
convert.media_kind(MEDIA_KIND_SONG))
self.assertEqual(const.MEDIA_TYPE_MUSIC,
convert.media_kind(MEDIA_KIND_PODCAST))
+ self.assertEqual(const.MEDIA_TYPE_MUSIC,
+ convert.media_kind(MEDIA_KIND_PODCAST2))
self.assertEqual(const.MEDIA_TYPE_MUSIC,
convert.media_kind(MEDIA_KIND_COACHEDAUDIO))
self.assertEqual(const.MEDIA_TYPE_MUSIC,
@@ -78,6 +86,8 @@ class ConvertTest(unittest.TestCase):
def test_tv_kinds(self):
self.assertEqual(const.MEDIA_TYPE_TV,
convert.media_kind(MEDIA_KIND_TVSHOW))
+ self.assertEqual(const.MEDIA_TYPE_TV,
+ convert.media_kind(MEDIA_KIND_TVSHOW2))
def test_unknown_media_kind_throws(self):
with self.assertRaises(exceptions.UnknownMediaKind):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-aiohttp"
],
"pre_install": [
"apt-get update",
"apt-get install -y virtualenv python3-dev"
],
"python": "3.5",
"reqs_path": [
"requirements_test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohttp==3.7.4.post0
alabaster==0.7.13
astroid==2.11.7
async-timeout==3.0.1
asynctest==0.9.0
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
chardet==4.0.0
charset-normalizer==2.0.12
coverage==6.2
coveralls==3.3.1
dill==0.3.4
docopt==0.6.2
docutils==0.18.1
flake8==5.0.4
idna==3.10
idna-ssl==1.1.0
ifaddr==0.2.0
imagesize==1.4.1
importlib-metadata==4.2.0
iniconfig==1.1.1
isort==5.10.1
Jinja2==3.0.3
lazy-object-proxy==1.7.1
MarkupSafe==2.0.1
mccabe==0.7.0
multidict==5.2.0
mypy-lang==0.5.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
-e git+https://github.com/postlund/pyatv.git@8937e869c3ce7d86edad5d2c88d2139fe57c3127#egg=pyatv
pycodestyle==2.9.1
pydocstyle==6.3.0
pyflakes==2.5.0
Pygments==2.14.0
pylint==2.13.9
pyparsing==3.1.4
pytest==7.0.1
pytest-aiohttp==0.1.3
pytest-cov==4.0.0
pytest-timeout==2.1.0
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==1.5.1
tomli==1.2.3
typed-ast==1.5.5
typing_extensions==4.1.1
urllib3==1.26.20
wrapt==1.16.0
yarl==1.7.2
zeroconf==0.38.4
zipp==3.6.0
| name: pyatv
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohttp==3.7.4.post0
- alabaster==0.7.13
- astroid==2.11.7
- async-timeout==3.0.1
- asynctest==0.9.0
- attrs==22.2.0
- babel==2.11.0
- chardet==4.0.0
- charset-normalizer==2.0.12
- coverage==6.2
- coveralls==3.3.1
- dill==0.3.4
- docopt==0.6.2
- docutils==0.18.1
- flake8==5.0.4
- idna==3.10
- idna-ssl==1.1.0
- ifaddr==0.2.0
- imagesize==1.4.1
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isort==5.10.1
- jinja2==3.0.3
- lazy-object-proxy==1.7.1
- markupsafe==2.0.1
- mccabe==0.7.0
- multidict==5.2.0
- mypy-lang==0.5.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pydocstyle==6.3.0
- pyflakes==2.5.0
- pygments==2.14.0
- pylint==2.13.9
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-aiohttp==0.1.3
- pytest-cov==4.0.0
- pytest-timeout==2.1.0
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==1.5.1
- tomli==1.2.3
- typed-ast==1.5.5
- typing-extensions==4.1.1
- urllib3==1.26.20
- wrapt==1.16.0
- yarl==1.7.2
- zeroconf==0.38.4
- zipp==3.6.0
prefix: /opt/conda/envs/pyatv
| [
"tests/test_convert.py::ConvertTest::test_music_media_kinds",
"tests/test_convert.py::ConvertTest::test_tv_kinds",
"tests/test_convert.py::ConvertTest::test_video_media_kinds"
]
| []
| [
"tests/test_convert.py::ConvertTest::test_invalid_time",
"tests/test_convert.py::ConvertTest::test_media_type_to_string",
"tests/test_convert.py::ConvertTest::test_no_time_returns_zero",
"tests/test_convert.py::ConvertTest::test_play_state_no_media",
"tests/test_convert.py::ConvertTest::test_playstate_str",
"tests/test_convert.py::ConvertTest::test_regular_playstates",
"tests/test_convert.py::ConvertTest::test_time_in_seconds",
"tests/test_convert.py::ConvertTest::test_unknown_media_kind",
"tests/test_convert.py::ConvertTest::test_unknown_media_kind_throws",
"tests/test_convert.py::ConvertTest::test_unknown_media_type_to_str_throws",
"tests/test_convert.py::ConvertTest::test_unknown_playstate_throws",
"tests/test_convert.py::ConvertTest::test_unsupported_playstate_str"
]
| []
| MIT License | 1,079 | [
"pyatv/convert.py"
]
| [
"pyatv/convert.py"
]
|
cherrypy__cherrypy-1578 | 8c635f55c3634b722260fef501ee49fb440be3ac | 2017-03-11 14:37:20 | 2c5643367147bae270e83dfba25c1897f37dbe18 | codacy-bot:
 Here is an overview of what got changed by this pull request:
```diff
Complexity increasing per file
==============================
- cherrypy/_cperror.py 8
```
See the complete overview on [Codacy](https://www.codacy.com/app/cherrypy/cherrypy/pullRequest?prid=554915&bid=4292032)
mar10: Concerning the tests, it would go something like
```py
import cherrypy

class RedirectApp(object):
def index(self, **kwargs):
raise cherrypy.HTTPRedirect("example.com?redir=<script>alert(1);</script>")
index.exposed = True
cherrypy.quickstart(RedirectApp())
```
and
```py
import requests
res = requests.get("http://127.0.0.1:8080?redir=<abc>", allow_redirects=False)
assert res.status_code == 303
assert b'<' not in res.content
```
But I don't know where and how to add it.
webknjaz: Regarding tests, please check [`cherrypy/test/test_request_obj.py`](https://github.com/cherrypy/cherrypy/blob/master/cherrypy/test/test_request_obj.py), I don't see tests for 3xx status codes, but it should be relatively easy to add them there :)
P.S. Please note `getPage()`, `assertStatus()` and `assertBody()` helpers.
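A rough sketch with those helpers (handler path and method name here are only placeholders, not the final code):
```py
def test_redirect_escapes_html(self):
    # assumes an exposed handler that raises HTTPRedirect with '<script>' in the target URL
    self.getPage('/redirect/url_with_script')
    self.assertStatus(303)
    # the raw markup must not appear in the body, only its escaped form
    assert b'<script>' not in self.body
    assert b'&lt;script&gt;' in self.body
```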
mar10: Looking at the code I would not consider this change harmful and we already invested quite a bit of time on this one-liner ;)
But I'll see what I can do (may take a while until I find time though). | diff --git a/cherrypy/_cperror.py b/cherrypy/_cperror.py
index 6c952e91..b597c645 100644
--- a/cherrypy/_cperror.py
+++ b/cherrypy/_cperror.py
@@ -271,7 +271,7 @@ class HTTPRedirect(CherryPyException):
307: 'This resource has moved temporarily to ',
}[status]
msg += '<a href=%s>%s</a>.'
- msgs = [msg % (saxutils.quoteattr(u), u) for u in self.urls]
+ msgs = [msg % (saxutils.quoteattr(u), escape_html(u)) for u in self.urls]
response.body = ntob('<br />\n'.join(msgs), 'utf-8')
# Previous code may have set C-L, so we have to reset it
# (allow finalize to set it).
| Encode URLs in redirect responses
* **I'm submitting a ...**
[x] bug report
[ ] feature request
[ ] question about the decisions made in the repository
* **What is the current behavior?**
The standard response page for 30x errors contains the target URL as `href` as well as clear text.
* **What is the expected behavior?**
Even though browsers will *mostly* redirect instead of displaying the result, you never know:
Any input from unknown sources should be HTML-escaped.
* **Please tell us about your environment:**
CherryPy 3.8 until 10.1
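For illustration, a minimal sketch of the kind of escaping meant here, using only the standard library (not CherryPy's actual code):
```py
from xml.sax import saxutils

url = "/some<script>alert(1);</script>url"
# quoteattr() makes the URL safe inside the href attribute,
# escape() makes it safe as the visible link text
link = '<a href=%s>%s</a>' % (saxutils.quoteattr(url), saxutils.escape(url))
assert '<script>' not in link
```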
| cherrypy/cherrypy | diff --git a/cherrypy/test/test_core.py b/cherrypy/test/test_core.py
index 2e590a9d..f16efd58 100644
--- a/cherrypy/test/test_core.py
+++ b/cherrypy/test/test_core.py
@@ -150,6 +150,9 @@ class CoreRequestHandlingTest(helper.CPWebCase):
def url_with_quote(self):
raise cherrypy.HTTPRedirect("/some\"url/that'we/want")
+ def url_with_xss(self):
+ raise cherrypy.HTTPRedirect("/some<script>alert(1);</script>url/that'we/want")
+
def url_with_unicode(self):
raise cherrypy.HTTPRedirect(ntou('тест', 'utf-8'))
@@ -435,6 +438,13 @@ class CoreRequestHandlingTest(helper.CPWebCase):
self.assertStatus(303)
assertValidXHTML()
+ def test_redirect_with_xss(self):
+ """A redirect to a URL with HTML injected should result in page contents escaped."""
+ self.getPage('/redirect/url_with_xss')
+ self.assertStatus(303)
+ assert b'<script>' not in self.body
+ assert b'<script>' in self.body
+
def test_redirect_with_unicode(self):
"""
A redirect to a URL with Unicode should return a Location
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 10.2 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[testing]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "",
"pip_packages": [],
"pre_install": [],
"python": "3.9",
"reqs_path": [],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | backports.unittest-mock==1.5.1
cheroot==10.0.1
-e git+https://github.com/cherrypy/cherrypy.git@8c635f55c3634b722260fef501ee49fb440be3ac#egg=CherryPy
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
jaraco.functools==4.1.0
more-itertools==10.6.0
nose==1.3.7
nose-testconfig==0.10
objgraph==3.6.2
packaging==24.2
pluggy==1.5.0
portend==3.2.0
pytest==8.3.5
python-dateutil==2.9.0.post0
six==1.17.0
tempora==5.8.0
tomli==2.2.1
| name: cherrypy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- backports-unittest-mock==1.5.1
- cheroot==10.0.1
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- jaraco-functools==4.1.0
- more-itertools==10.6.0
- nose==1.3.7
- nose-testconfig==0.10
- objgraph==3.6.2
- packaging==24.2
- pluggy==1.5.0
- portend==3.2.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- six==1.17.0
- tempora==5.8.0
- tomli==2.2.1
prefix: /opt/conda/envs/cherrypy
| [
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_redirect_with_xss"
]
| []
| [
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testCookies",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testDefaultContentType",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testFavicon",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testFlatten",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testRanges",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testRedirect",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testSlashes",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::testStatus",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_InternalRedirect",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_cherrypy_url",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_expose_decorator",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_multiple_headers",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_on_end_resource_status",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_redirect_with_unicode",
"cherrypy/test/test_core.py::CoreRequestHandlingTest::test_gc",
"cherrypy/test/test_core.py::ErrorTests::test_contextmanager",
"cherrypy/test/test_core.py::ErrorTests::test_start_response_error",
"cherrypy/test/test_core.py::ErrorTests::test_gc",
"cherrypy/test/test_core.py::TestBinding::test_bind_ephemeral_port"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,080 | [
"cherrypy/_cperror.py"
]
| [
"cherrypy/_cperror.py"
]
|
planetarypy__pvl-25 | a483fee7b9b658bb0c22586dc1ab87753439b998 | 2017-03-11 21:45:30 | 0fc3b804f214170b28f94bd5b3703f439c968f92 | diff --git a/pvl/_collections.py b/pvl/_collections.py
index afeab25..2abaf3a 100644
--- a/pvl/_collections.py
+++ b/pvl/_collections.py
@@ -34,6 +34,13 @@ class KeysView(MappingView):
for key, _ in self._mapping:
yield key
+ def __getitem__(self, index):
+ return self._mapping[index][0]
+
+ def __repr__(self):
+ keys = [key for key, _ in self._mapping]
+ return '%s(%r)' % (type(self).__name__, keys)
+
class ItemsView(MappingView):
def __contains__(self, item):
@@ -44,6 +51,9 @@ class ItemsView(MappingView):
for item in self._mapping:
yield item
+ def __getitem__(self, index):
+ return self._mapping[index]
+
class ValuesView(MappingView):
def __contains__(self, value):
@@ -56,6 +66,13 @@ class ValuesView(MappingView):
for _, value in self._mapping:
yield value
+ def __getitem__(self, index):
+ return self._mapping[index][1]
+
+ def __repr__(self):
+ values = [value for _, value in self._mapping]
+ return '%s(%r)' % (type(self).__name__, values)
+
class OrderedMultiDict(dict, MutableMapping):
"""A ``dict`` like container.
| items is not subscriptable in python3
As pointed out in #23, in Python 3 ``items()`` returns an [ItemsView](https://github.com/planetarypy/pvl/blob/master/pvl/_collections.py#L38) object, which is not subscriptable
```
In [1]: import pvl
In [2]: label = pvl.loads("""
...: foo = bar
...: monty = python
...: """)
In [3]: items = label.items()
In [4]: items[0]
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-4-95f461411437> in <module>()
----> 1 items[0]
TypeError: 'ItemsView' object does not support indexing
```
But we should get something similar to:
```
In [4]: items[0]
Out[4]: ('foo', 'bar')
``` | planetarypy/pvl | diff --git a/tests/test_collections.py b/tests/test_collections.py
index a77f9dc..50df73d 100644
--- a/tests/test_collections.py
+++ b/tests/test_collections.py
@@ -368,6 +368,39 @@ def test_py2_items():
assert module.values() == [1, 2, 3]
[email protected](six.PY2, reason='requires python3')
+def test_py3_items():
+ module = pvl.PVLModule()
+
+ assert isinstance(module.items(), pvl._collections.ItemsView)
+ with pytest.raises(IndexError):
+ module.items()[0]
+
+ assert isinstance(module.keys(), pvl._collections.KeysView)
+ with pytest.raises(IndexError):
+ module.keys()[0]
+
+ assert isinstance(module.values(), pvl._collections.ValuesView)
+ with pytest.raises(IndexError):
+ module.values()[0]
+
+ module = pvl.PVLModule([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ])
+
+ assert isinstance(module.items(), pvl._collections.ItemsView)
+ assert module.items()[0] == ('a', 1)
+
+ assert isinstance(module.keys(), pvl._collections.KeysView)
+ assert module.keys()[0] == 'a'
+
+ assert isinstance(module.values(), pvl._collections.ValuesView)
+ assert module.values()[0] == 1
+
+
+
if six.PY3:
def iteritems(module):
return module.items()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_issue_reference"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8",
"pytest",
"pytest-cov",
"tox",
"wheel"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
-e git+https://github.com/planetarypy/pvl.git@a483fee7b9b658bb0c22586dc1ab87753439b998#egg=pvl
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytz==2025.2
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: pvl
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytz==2025.2
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.16.2
- zipp==3.6.0
prefix: /opt/conda/envs/pvl
| [
"tests/test_collections.py::test_py3_items"
]
| [
"tests/test_collections.py::test_conversion"
]
| [
"tests/test_collections.py::test_empty",
"tests/test_collections.py::test_list_creation",
"tests/test_collections.py::test_dict_creation",
"tests/test_collections.py::test_keyword_creation",
"tests/test_collections.py::test_key_access",
"tests/test_collections.py::test_index_access",
"tests/test_collections.py::test_slice_access",
"tests/test_collections.py::test_set",
"tests/test_collections.py::test_delete",
"tests/test_collections.py::test_clear",
"tests/test_collections.py::test_discard",
"tests/test_collections.py::test_pop",
"tests/test_collections.py::test_popitem",
"tests/test_collections.py::test_update",
"tests/test_collections.py::test_append",
"tests/test_collections.py::test_len",
"tests/test_collections.py::test_repr",
"tests/test_collections.py::test_iterators",
"tests/test_collections.py::test_equlity",
"tests/test_collections.py::test_copy"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,081 | [
"pvl/_collections.py"
]
| [
"pvl/_collections.py"
]
|
|
santoshphilip__eppy-148 | 0c4d92cdb82974cd9fedacc6d1273b7a4deb03fe | 2017-03-11 22:32:46 | 48e1e1caaba29eafe1c7d1c911f44731064d8e22 | diff --git a/eppy/bunch_subclass.py b/eppy/bunch_subclass.py
index 012d7f3..a15ce60 100644
--- a/eppy/bunch_subclass.py
+++ b/eppy/bunch_subclass.py
@@ -34,7 +34,7 @@ class RangeError(ValueError):
def almostequal(first, second, places=7, printit=True):
"""
Test if two values are equal to a given number of places.
- This is based on python's unittest so may be covered by Python's
+ This is based on python's unittest so may be covered by Python's
license.
"""
@@ -59,30 +59,30 @@ def extendlist(lst, i, value=''):
pass
else:
lst.extend([value, ] * (i - len(lst) + 1))
-
+
def return42(self, *args, **kwargs):
# proof of concept - to be removed
- return 42
+ return 42
def addfunctions(abunch):
"""add functions to epbunch"""
# proof of concept - remove
- abunch['__functions'].update({'return42':return42})
- abunch['__functions'].update({'buildingname':fh.buildingname})
+ abunch['__functions'].update({'return42':return42})
+ abunch['__functions'].update({'buildingname':fh.buildingname})
# proof of concept
-
+
key = abunch.obj[0].upper()
-
+
#-----------------
# TODO : alternate strategy to avoid listing the objkeys in snames
# check if epbunch has field "Zone_Name" or "Building_Surface_Name"
# and is in group u'Thermal Zones and Surfaces'
# then it is likely to be a surface.
# of course we need to recode for surfaces that do not have coordinates :-(
- # or we can filter those out since they do not have
+ # or we can filter those out since they do not have
# the field "Number_of_Vertices"
snames = [
"BuildingSurface:Detailed",
@@ -129,8 +129,8 @@ def addfunctions(abunch):
func_dict = {
'rvalue': fh.rvalue,
'ufactor': fh.ufactor,
- 'rvalue_ip': fh.rvalue_ip,# quick fix for Santosh. Needs to thought thru
- 'ufactor_ip': fh.ufactor_ip,# quick fix for Santosh. Needs to thought thru
+ 'rvalue_ip': fh.rvalue_ip, # quick fix for Santosh. Needs to thought thru
+ 'ufactor_ip': fh.ufactor_ip, # quick fix for Santosh. Needs to thought thru
'heatcapacity': fh.heatcapacity,
}
abunch.__functions.update(func_dict)
@@ -146,13 +146,13 @@ def addfunctions(abunch):
#-----------------
# add function subsurfaces
# going to cheat here a bit
- # check if epbunch has field "Zone_Name"
+ # check if epbunch has field "Zone_Name"
# and is in group u'Thermal Zones and Surfaces'
# then it is likely to be a surface attached to a zone
fields = abunch.fieldnames
try:
group = abunch.getfieldidd('key')['group']
- except KeyError as e: # some pytests don't have group
+ except KeyError as e: # some pytests don't have group
group = None
if group == u'Thermal Zones and Surfaces':
if "Zone_Name" in fields:
@@ -163,22 +163,22 @@ def addfunctions(abunch):
class EpBunch(Bunch):
"""
- Fields, values, and descriptions of fields in an EnergyPlus IDF object
- stored in a `bunch` which is a `dict` extended to allow access to dict
+ Fields, values, and descriptions of fields in an EnergyPlus IDF object
+ stored in a `bunch` which is a `dict` extended to allow access to dict
fields as attributes as well as by keys.
-
+
"""
def __init__(self, obj, objls, objidd, *args, **kwargs):
super(EpBunch, self).__init__(*args, **kwargs)
- self.obj = obj # field names
- self.objls = objls # field values
+ self.obj = obj # field names
+ self.objls = objls # field values
self.objidd = objidd # field metadata (minimum, maximum, type, etc.)
- self.theidf = None # pointer to the idf this epbunch belongs to
- # This is None if there is no idf - a standalone epbunch
+ self.theidf = None # pointer to the idf this epbunch belongs to
+ # This is None if there is no idf - a standalone epbunch
# This will be set by Idf_MSequence
- self['__functions'] = {} # initialize the functions
+ self['__functions'] = {} # initialize the functions
addfunctions(self)
-
+
@property
def fieldnames(self):
"""Friendly name for objls.
@@ -189,70 +189,70 @@ class EpBunch(Bunch):
def fieldvalues(self):
"""Friendly name for obj.
"""
- return self.obj
-
+ return self.obj
+
def checkrange(self, fieldname):
"""Check if the value for a field is within the allowed range.
"""
return checkrange(self, fieldname)
-
+
def getrange(self, fieldname):
"""Get the allowed range of values for a field.
"""
return getrange(self, fieldname)
-
+
def getfieldidd(self, fieldname):
"""get the idd dict for this field
Will return {} if the fieldname does not exist"""
return getfieldidd(self, fieldname)
-
+
def getfieldidd_item(self, fieldname, iddkey):
"""return an item from the fieldidd, given the iddkey
will return and empty list if it does not have the iddkey
or if the fieldname does not exist"""
return getfieldidd_item(self, fieldname, iddkey)
-
+
def get_retaincase(self, fieldname):
"""check if the field should retain case"""
return get_retaincase(self, fieldname)
-
+
def isequal(self, fieldname, value, places=7):
"""return True if the field == value
Will retain case if get_retaincase == True
for real value will compare to decimal 'places'
"""
return isequal(self, fieldname, value, places=places)
-
+
def getreferingobjs(self, iddgroups=None, fields=None):
"""Get a list of objects that refer to this object"""
- return getreferingobjs(self, iddgroups=iddgroups, fields=fields)
-
+ return getreferingobjs(self, iddgroups=iddgroups, fields=fields)
+
def get_referenced_object(self, fieldname):
"""
Get an object referred to by a field in another object.
-
+
For example an object of type Construction has fields for each layer, each
- of which refers to a Material. This functions allows the object
+ of which refers to a Material. This functions allows the object
representing a Material to be fetched using the name of the layer.
-
+
Returns the first item found since if there is more than one matching item,
it is a malformed IDF.
-
+
Parameters
----------
referring_object : EpBunch
The object which contains a reference to another object,
fieldname : str
- The name of the field in the referring object which contains the
+ The name of the field in the referring object which contains the
reference to another object.
-
+
Returns
-------
EpBunch
-
+
"""
return get_referenced_object(self, fieldname)
-
+
def __setattr__(self, name, value):
try:
origname = self['__functions'][name]
@@ -280,9 +280,9 @@ class EpBunch(Bunch):
extendlist(self.fieldvalues, i)
self.fieldvalues[i] = value
else:
- astr = "unable to find field %s" % (name, )
+ astr = "unable to find field %s" % (name,)
raise BadEPFieldError(astr) # TODO: could raise AttributeError
-
+
def __getattr__(self, name):
try:
func = self['__functions'][name]
@@ -307,11 +307,11 @@ class EpBunch(Bunch):
except IndexError:
return ''
else:
- astr = "unable to find field %s" % (name, )
+ astr = "unable to find field %s" % (name,)
raise BadEPFieldError(astr)
-
+
def __getitem__(self, key):
- if key in ('obj', 'objls', 'objidd',
+ if key in ('obj', 'objls', 'objidd',
'__functions', '__aliases', 'theidf'):
return super(EpBunch, self).__getitem__(key)
elif key in self.fieldnames:
@@ -321,11 +321,11 @@ class EpBunch(Bunch):
except IndexError:
return ''
else:
- astr = "unknown field %s" % (key, )
+ astr = "unknown field %s" % (key,)
raise BadEPFieldError(astr)
-
+
def __setitem__(self, key, value):
- if key in ('obj', 'objls', 'objidd',
+ if key in ('obj', 'objls', 'objidd',
'__functions', '__aliases', 'theidf'):
super(EpBunch, self).__setitem__(key, value)
return None
@@ -337,25 +337,25 @@ class EpBunch(Bunch):
extendlist(self.fieldvalues, i)
self.fieldvalues[i] = value
else:
- astr = "unknown field %s" % (key, )
+ astr = "unknown field %s" % (key,)
raise BadEPFieldError(astr)
def __repr__(self):
"""print this as an idf snippet"""
lines = [str(val) for val in self.obj]
comments = [comm.replace('_', ' ') for comm in self.objls]
- lines[0] = "%s," % (lines[0], ) # comma after first line
+ lines[0] = "%s," % (lines[0],) # comma after first line
for i, line in enumerate(lines[1:-1]):
- lines[i + 1] = ' %s,' % (line, ) # indent and comma
- lines[-1] = ' %s;' % (lines[-1], )# ';' after last line
- lines = [line.ljust(26) for line in lines] # ljsut the lines
+ lines[i + 1] = ' %s,' % (line,) # indent and comma
+ lines[-1] = ' %s;' % (lines[-1],) # ';' after last line
+ lines = lines[:1] + [line.ljust(26) for line in lines[1:]] # ljsut the lines
filler = '%s !- %s'
nlines = [filler % (line, comm) for line,
- comm in zip(lines[1:], comments[1:])]# adds comments to line
- nlines.insert(0, lines[0])# first line without comment
+ comm in zip(lines[1:], comments[1:])] # adds comments to line
+ nlines.insert(0, lines[0]) # first line without comment
astr = '\n'.join(nlines)
- return '\n%s\n' % (astr, )
-
+ return '\n%s\n' % (astr,)
+
def __str__(self):
"""same as __repr__"""
# needed if YAML is installed. See issue 67
@@ -366,7 +366,7 @@ class EpBunch(Bunch):
fnames = self.fieldnames
func_names = self['__functions'].keys()
return super(EpBunch, self).__dir__() + fnames + func_names
-
+
def getrange(bch, fieldname):
"""get the ranges for this field"""
@@ -417,7 +417,7 @@ def checkrange(bch, fieldname):
return fieldvalue
"""get the idd dict for this field
Will return {} if the fieldname does not exist"""
-
+
def getfieldidd(bch, fieldname):
"""get the idd dict for this field
Will return {} if the fieldname does not exist"""
@@ -425,11 +425,11 @@ def getfieldidd(bch, fieldname):
try:
fieldindex = bch.objls.index(fieldname)
except ValueError as e:
- return {} # the fieldname does not exist
+ return {} # the fieldname does not exist
# so there is no idd
fieldidd = bch.objidd[fieldindex]
return fieldidd
-
+
def getfieldidd_item(bch, fieldname, iddkey):
"""return an item from the fieldidd, given the iddkey
will return and empty list if it does not have the iddkey
@@ -439,14 +439,14 @@ def getfieldidd_item(bch, fieldname, iddkey):
return fieldidd[iddkey]
except KeyError as e:
return []
-
-
+
+
def get_retaincase(bch, fieldname):
"""Check if the field should retain case"""
fieldidd = bch.getfieldidd(fieldname)
return 'retaincase' in fieldidd
-
-
+
+
def isequal(bch, fieldname, value, places=7):
"""return True if the field is equal to value"""
def equalalphanumeric(bch, fieldname, value):
@@ -454,7 +454,7 @@ def isequal(bch, fieldname, value, places=7):
return bch[fieldname] == value
else:
return bch[fieldname].upper() == value.upper()
-
+
fieldidd = bch.getfieldidd(fieldname)
try:
ftype = fieldidd['type'][0]
@@ -464,7 +464,7 @@ def isequal(bch, fieldname, value, places=7):
return equalalphanumeric(bch, fieldname, value)
except KeyError as e:
return equalalphanumeric(bch, fieldname, value)
-
+
def getreferingobjs(referedobj, iddgroups=None, fields=None):
"""Get a list of objects that refer to this object"""
@@ -485,8 +485,8 @@ def getreferingobjs(referedobj, iddgroups=None, fields=None):
referedidd = referedobj.getfieldidd("Name")
references = referedidd['reference']
idfobjs = idf.idfobjects.values()
- idfobjs = list(itertools.chain.from_iterable(idfobjs)) # flatten list
- if iddgroups: # optional filter
+ idfobjs = list(itertools.chain.from_iterable(idfobjs)) # flatten list
+ if iddgroups: # optional filter
idfobjs = [anobj for anobj in idfobjs
if anobj.getfieldidd('key')['group'] in iddgroups]
for anobj in idfobjs:
@@ -510,22 +510,22 @@ def getreferingobjs(referedobj, iddgroups=None, fields=None):
def get_referenced_object(referring_object, fieldname):
"""
Get an object referred to by a field in another object.
-
+
For example an object of type Construction has fields for each layer, each
- of which refers to a Material. This functions allows the object
+ of which refers to a Material. This functions allows the object
representing a Material to be fetched using the name of the layer.
-
+
Returns the first item found since if there is more than one matching item,
it is a malformed IDF.
-
+
Parameters
----------
referring_object : EpBunch
The object which contains a reference to another object,
fieldname : str
- The name of the field in the referring object which contains the
+ The name of the field in the referring object which contains the
reference to another object.
-
+
Returns
-------
EpBunch
@@ -540,6 +540,6 @@ def get_referenced_object(referring_object, fieldname):
referenced_obj_name = referring_object[fieldname]
if obj.Name == referenced_obj_name:
return obj
-
-
+
+
diff --git a/eppy/hvacbuilder.py b/eppy/hvacbuilder.py
index cea040b..eec8c6f 100644
--- a/eppy/hvacbuilder.py
+++ b/eppy/hvacbuilder.py
@@ -13,9 +13,8 @@ from __future__ import unicode_literals
import copy
-from eppy.modeleditor import IDF
-
import eppy.bunch_subclass as bunch_subclass
+from eppy.modeleditor import IDF
import eppy.modeleditor as modeleditor
from six.moves import xrange
@@ -57,43 +56,43 @@ def flattencopy(lst):
# http://stackoverflow.com/questions/2158395/flatten-an-irregular-list-of-lists-in-python
thelist = copy.deepcopy(lst)
list_is_nested = True
- while list_is_nested: #outer loop
+ while list_is_nested: # outer loop
keepchecking = False
atemp = []
- for element in thelist: #inner loop
+ for element in thelist: # inner loop
if isinstance(element, list):
atemp.extend(element)
keepchecking = True
else:
atemp.append(element)
- list_is_nested = keepchecking #determine if outer loop exits
+ list_is_nested = keepchecking # determine if outer loop exits
thelist = atemp[:]
return thelist
def makepipecomponent(idf, pname):
"""make a pipe component
generate inlet outlet names"""
- apipe = idf.newidfobject("Pipe:Adiabatic".upper(), pname)
- apipe.Inlet_Node_Name = "%s_inlet" % (pname, )
- apipe.Outlet_Node_Name = "%s_outlet" % (pname, )
+ apipe = idf.newidfobject("Pipe:Adiabatic".upper(), Name=pname)
+ apipe.Inlet_Node_Name = "%s_inlet" % (pname,)
+ apipe.Outlet_Node_Name = "%s_outlet" % (pname,)
return apipe
def makeductcomponent(idf, dname):
"""make a duct component
generate inlet outlet names"""
- aduct = idf.newidfobject("duct".upper(), dname)
- aduct.Inlet_Node_Name = "%s_inlet" % (dname, )
- aduct.Outlet_Node_Name = "%s_outlet" % (dname, )
+ aduct = idf.newidfobject("duct".upper(), Name=dname)
+ aduct.Inlet_Node_Name = "%s_inlet" % (dname,)
+ aduct.Outlet_Node_Name = "%s_outlet" % (dname,)
return aduct
def makepipebranch(idf, bname):
"""make a branch with a pipe
use standard inlet outlet names"""
# make the pipe component first
- pname = "%s_pipe" % (bname, )
+ pname = "%s_pipe" % (bname,)
apipe = makepipecomponent(idf, pname)
# now make the branch with the pipe in it
- abranch = idf.newidfobject("BRANCH", bname)
+ abranch = idf.newidfobject("BRANCH", Name=bname)
abranch.Component_1_Object_Type = 'Pipe:Adiabatic'
abranch.Component_1_Name = pname
abranch.Component_1_Inlet_Node_Name = apipe.Inlet_Node_Name
@@ -105,10 +104,10 @@ def makeductbranch(idf, bname):
"""make a branch with a duct
use standard inlet outlet names"""
# make the duct component first
- pname = "%s_duct" % (bname, )
+ pname = "%s_duct" % (bname,)
aduct = makeductcomponent(idf, pname)
# now make the branch with the duct in it
- abranch = idf.newidfobject("BRANCH", bname)
+ abranch = idf.newidfobject("BRANCH", Name=bname)
abranch.Component_1_Object_Type = 'duct'
abranch.Component_1_Name = pname
abranch.Component_1_Inlet_Node_Name = aduct.Inlet_Node_Name
@@ -123,10 +122,10 @@ def getbranchcomponents(idf, branch, utest=False):
complist = []
for i in range(1, 100000):
try:
- objtype = branch[fobjtype % (i, )]
+ objtype = branch[fobjtype % (i,)]
if objtype.strip() == '':
break
- objname = branch[fobjname % (i, )]
+ objname = branch[fobjname % (i,)]
complist.append((objtype, objname))
except bunch_subclass.BadEPFieldError:
break
@@ -231,7 +230,7 @@ def initinletoutlet(idf, idfobject, thisnode, force=False):
return True
else:
return False
- except AttributeError: # field may be a list
+ except AttributeError: # field may be a list
return False
def trimfields(fields, thisnode):
if len(fields) > 1:
@@ -249,12 +248,12 @@ def initinletoutlet(idf, idfobject, thisnode, force=False):
return fields
inletfields = getfieldnamesendswith(idfobject, "Inlet_Node_Name")
- inletfields = trimfields(inletfields, thisnode) # or warn with exception
+ inletfields = trimfields(inletfields, thisnode) # or warn with exception
for inletfield in inletfields:
if blankfield(idfobject[inletfield]) == True or force == True:
idfobject[inletfield] = "%s_%s" % (idfobject.Name, inletfield)
outletfields = getfieldnamesendswith(idfobject, "Outlet_Node_Name")
- outletfields = trimfields(outletfields, thisnode) # or warn with exception
+ outletfields = trimfields(outletfields, thisnode) # or warn with exception
for outletfield in outletfields:
if blankfield(idfobject[outletfield]) == True or force == True:
idfobject[outletfield] = "%s_%s" % (idfobject.Name, outletfield)
@@ -271,12 +270,12 @@ def componentsintobranch(idf, branch, listofcomponents, fluid=None):
# assumes that the nodes of the component connect to each other
# empty branch if it has existing components
thebranchname = branch.Name
- thebranch = idf.removeextensibles('BRANCH', thebranchname) # empty the branch
+ thebranch = idf.removeextensibles('BRANCH', thebranchname) # empty the branch
# fill in the new components with the node names into this branch
# find the first extensible field and fill in the data in obj.
e_index = idf.getextensibleindex('BRANCH', thebranchname)
theobj = thebranch.obj
- modeleditor.extendlist(theobj, e_index) # just being careful here
+ modeleditor.extendlist(theobj, e_index) # just being careful here
for comp, compnode in listofcomponents:
theobj.append(comp.key)
theobj.append(comp.Name)
@@ -308,7 +307,7 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing ---------
testn = 0
# -------- testing ---------
- newairloop = idf.newidfobject("AirLoopHVAC".upper(), loopname)
+ newairloop = idf.newidfobject("AirLoopHVAC".upper(), Name=loopname)
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
if testn == None:
@@ -340,7 +339,7 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
# make the branch lists for this air loop
sbranchlist = idf.newidfobject("BRANCHLIST",
- newairloop[flnames[0]])
+ Name=newairloop[flnames[0]])
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
@@ -369,10 +368,10 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing ---------
# rename inlet outlet of endpoints of loop
anode = "Component_1_Inlet_Node_Name"
- sameinnode = "Supply_Side_Inlet_Node_Name" # TODO : change ?
+ sameinnode = "Supply_Side_Inlet_Node_Name" # TODO : change ?
sbranchs[0][anode] = newairloop[sameinnode]
anode = "Component_1_Outlet_Node_Name"
- sameoutnode = "Supply_Side_Outlet_Node_Names" # TODO : change ?
+ sameoutnode = "Supply_Side_Outlet_Node_Names" # TODO : change ?
sbranchs[-1][anode] = newairloop[sameoutnode]
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
@@ -380,11 +379,11 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
returnnone()
# -------- testing ---------
# rename inlet outlet of endpoints of loop - rename in pipe
- dname = sbranchs[0]['Component_1_Name'] # get the duct name
- aduct = idf.getobject('duct'.upper(), dname) # get duct
+ dname = sbranchs[0]['Component_1_Name'] # get the duct name
+ aduct = idf.getobject('duct'.upper(), dname) # get duct
aduct.Inlet_Node_Name = newairloop[sameinnode]
- dname = sbranchs[-1]['Component_1_Name'] # get the duct name
- aduct = idf.getobject('duct'.upper(), dname) # get duct
+ dname = sbranchs[-1]['Component_1_Name'] # get the duct name
+ aduct = idf.getobject('duct'.upper(), dname) # get duct
aduct.Outlet_Node_Name = newairloop[sameoutnode]
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
@@ -395,16 +394,16 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
# # TODO : test if there are parallel branches
# make the connectorlist an fill fields
sconnlist = idf.newidfobject("CONNECTORLIST",
- newairloop.Connector_List_Name)
+ Name=newairloop.Connector_List_Name)
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
if testn == None:
returnnone()
# -------- testing ---------
sconnlist.Connector_1_Object_Type = "Connector:Splitter"
- sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname, )
+ sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname,)
sconnlist.Connector_2_Object_Type = "Connector:Mixer"
- sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname, )
+ sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname,)
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
if testn == None:
@@ -412,10 +411,10 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing ---------
# make splitters and mixers
s_splitter = idf.newidfobject("CONNECTOR:SPLITTER",
- sconnlist.Connector_1_Name)
+ Name=sconnlist.Connector_1_Name)
s_splitter.obj.extend([sloop[0]] + sloop[1])
s_mixer = idf.newidfobject("CONNECTOR:MIXER",
- sconnlist.Connector_2_Name)
+ Name=sconnlist.Connector_2_Name)
s_mixer.obj.extend([sloop[-1]] + sloop[1])
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
@@ -423,18 +422,18 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
returnnone()
# -------- testing ---------
# demand side loop for airloop is made below
- #ZoneHVAC:EquipmentConnections
+ # ZoneHVAC:EquipmentConnections
for zone in dloop:
equipconn = idf.newidfobject("ZoneHVAC:EquipmentConnections".upper())
equipconn.Zone_Name = zone
fldname = "Zone_Conditioning_Equipment_List_Name"
- equipconn[fldname] = "%s equip list" % (zone, )
+ equipconn[fldname] = "%s equip list" % (zone,)
fldname = "Zone_Air_Inlet_Node_or_NodeList_Name"
- equipconn[fldname] = "%s Inlet Node" % (zone, )
+ equipconn[fldname] = "%s Inlet Node" % (zone,)
fldname = "Zone_Air_Node_Name"
- equipconn[fldname] = "%s Node" % (zone, )
+ equipconn[fldname] = "%s Node" % (zone,)
fldname = "Zone_Return_Air_Node_Name"
- equipconn[fldname] = "%s Outlet Node" % (zone, )
+ equipconn[fldname] = "%s Outlet Node" % (zone,)
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
if testn == None:
@@ -446,12 +445,12 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
z_equipconn = modeleditor.getobjects(
idf.idfobjects,
idf.model, idf.idd_info,
- "ZoneHVAC:EquipmentConnections".upper(), #places=7,
+ "ZoneHVAC:EquipmentConnections".upper(), # places=7,
**dict(Zone_Name=zone))[0]
z_equiplst.Name = z_equipconn.Zone_Conditioning_Equipment_List_Name
fld = "Zone_Equipment_1_Object_Type"
z_equiplst[fld] = "AirTerminal:SingleDuct:Uncontrolled"
- z_equiplst.Zone_Equipment_1_Name = "%sDirectAir" % (zone, )
+ z_equiplst.Zone_Equipment_1_Name = "%sDirectAir" % (zone,)
z_equiplst.Zone_Equipment_1_Cooling_Sequence = 1
z_equiplst.Zone_Equipment_1_Heating_or_NoLoad_Sequence = 1
# -------- testing ---------
@@ -464,11 +463,11 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
z_equipconn = modeleditor.getobjects(
idf.idfobjects,
idf.model, idf.idd_info,
- "ZoneHVAC:EquipmentConnections".upper(), #places=7,
+ "ZoneHVAC:EquipmentConnections".upper(), # places=7,
**dict(Zone_Name=zone))[0]
key = "AirTerminal:SingleDuct:Uncontrolled".upper()
z_airterm = idf.newidfobject(key)
- z_airterm.Name = "%sDirectAir" % (zone, )
+ z_airterm.Name = "%sDirectAir" % (zone,)
fld1 = "Zone_Supply_Air_Node_Name"
fld2 = "Zone_Air_Inlet_Node_or_NodeList_Name"
z_airterm[fld1] = z_equipconn[fld2]
@@ -487,15 +486,15 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
if testn == None:
returnnone()
# -------- testing ---------
- z_splitter.Name = "%s Demand Side Splitter" % (loopname, )
+ z_splitter.Name = "%s Demand Side Splitter" % (loopname,)
z_splitter.Inlet_Node_Name = newairloop.Demand_Side_Inlet_Node_Names
for i, zone in enumerate(dloop):
z_equipconn = modeleditor.getobjects(
idf.idfobjects,
idf.model, idf.idd_info,
- "ZoneHVAC:EquipmentConnections".upper(), #places=7,
+ "ZoneHVAC:EquipmentConnections".upper(), # places=7,
**dict(Zone_Name=zone))[0]
- fld = "Outlet_%s_Node_Name" % (i + 1, )
+ fld = "Outlet_%s_Node_Name" % (i + 1,)
z_splitter[fld] = z_equipconn.Zone_Air_Inlet_Node_or_NodeList_Name
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
@@ -505,7 +504,7 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
# make AirLoopHVAC:SupplyPath
key = "AirLoopHVAC:SupplyPath".upper()
z_supplypth = idf.newidfobject(key)
- z_supplypth.Name = "%sSupplyPath" % (loopname, )
+ z_supplypth.Name = "%sSupplyPath" % (loopname,)
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
if testn == None:
@@ -529,7 +528,7 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
if testn == None:
returnnone()
# -------- testing ---------
- z_mixer.Name = "%s Demand Side Mixer" % (loopname, )
+ z_mixer.Name = "%s Demand Side Mixer" % (loopname,)
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
if testn == None:
@@ -545,9 +544,9 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
z_equipconn = modeleditor.getobjects(
idf.idfobjects,
idf.model, idf.idd_info,
- "ZoneHVAC:EquipmentConnections".upper(), #places=7,
+ "ZoneHVAC:EquipmentConnections".upper(), # places=7,
**dict(Zone_Name=zone))[0]
- fld = "Inlet_%s_Node_Name" % (i + 1, )
+ fld = "Inlet_%s_Node_Name" % (i + 1,)
z_mixer[fld] = z_equipconn.Zone_Return_Air_Node_Name
# -------- testing ---------
testn = doingtesting(testing, testn, newairloop)
@@ -562,7 +561,7 @@ def makeairloop(idf, loopname, sloop, dloop, testing=None):
if testn == None:
returnnone()
# -------- testing ---------
- z_returnpth.Name = "%sReturnPath" % (loopname, )
+ z_returnpth.Name = "%sReturnPath" % (loopname,)
z_returnpth.Return_Air_Path_Outlet_Node_Name = newairloop.Demand_Side_Outlet_Node_Name
z_returnpth.Component_1_Object_Type = "AirLoopHVAC:ZoneMixer"
z_returnpth.Component_1_Name = z_mixer.Name
@@ -578,7 +577,7 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
# -------- <testing ---------
testn = 0
# -------- testing> ---------
- newplantloop = idf.newidfobject("PLANTLOOP", loopname)
+ newplantloop = idf.newidfobject("PLANTLOOP", Name=loopname)
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
if testn == None:
@@ -610,7 +609,7 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
# make the branch lists for this plant loop
sbranchlist = idf.newidfobject(
"BRANCHLIST",
- newplantloop.Plant_Side_Branch_List_Name)
+ Name=newplantloop.Plant_Side_Branch_List_Name)
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
if testn == None:
@@ -618,7 +617,7 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing> ---------
dbranchlist = idf.newidfobject(
"BRANCHLIST",
- newplantloop.Demand_Side_Branch_List_Name)
+ Name=newplantloop.Demand_Side_Branch_List_Name)
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
if testn == None:
@@ -669,11 +668,11 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
returnnone()
# -------- testing> ---------
# rename inlet outlet of endpoints of loop - rename in pipe
- pname = sbranchs[0]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = sbranchs[0]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Inlet_Node_Name = newplantloop[sameinnode]
- pname = sbranchs[-1]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = sbranchs[-1]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Outlet_Node_Name = newplantloop[sameoutnode]
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
@@ -704,11 +703,11 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
returnnone()
# -------- testing> ---------
# rename inlet outlet of endpoints of loop - rename in pipe
- pname = dbranchs[0]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = dbranchs[0]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Inlet_Node_Name = newplantloop[sameinnode]
- pname = dbranchs[-1]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = dbranchs[-1]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Outlet_Node_Name = newplantloop[sameoutnode]
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
@@ -721,18 +720,18 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
# make the connectorlist an fill fields
sconnlist = idf.newidfobject(
"CONNECTORLIST",
- newplantloop.Plant_Side_Connector_List_Name)
+ Name=newplantloop.Plant_Side_Connector_List_Name)
sconnlist.Connector_1_Object_Type = "Connector:Splitter"
- sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname, )
+ sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname,)
sconnlist.Connector_2_Object_Type = "Connector:Mixer"
- sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname, )
+ sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname,)
dconnlist = idf.newidfobject(
"CONNECTORLIST",
- newplantloop.Demand_Side_Connector_List_Name)
+ Name=newplantloop.Demand_Side_Connector_List_Name)
dconnlist.Connector_1_Object_Type = "Connector:Splitter"
- dconnlist.Connector_1_Name = "%s_demand_splitter" % (loopname, )
+ dconnlist.Connector_1_Name = "%s_demand_splitter" % (loopname,)
dconnlist.Connector_2_Object_Type = "Connector:Mixer"
- dconnlist.Connector_2_Name = "%s_demand_mixer" % (loopname, )
+ dconnlist.Connector_2_Name = "%s_demand_mixer" % (loopname,)
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
if testn == None:
@@ -742,20 +741,20 @@ def makeplantloop(idf, loopname, sloop, dloop, testing=None):
# make splitters and mixers
s_splitter = idf.newidfobject(
"CONNECTOR:SPLITTER",
- sconnlist.Connector_1_Name)
+ Name=sconnlist.Connector_1_Name)
s_splitter.obj.extend([sloop[0]] + sloop[1])
s_mixer = idf.newidfobject(
"CONNECTOR:MIXER",
- sconnlist.Connector_2_Name)
+ Name=sconnlist.Connector_2_Name)
s_mixer.obj.extend([sloop[-1]] + sloop[1])
# -
d_splitter = idf.newidfobject(
"CONNECTOR:SPLITTER",
- dconnlist.Connector_1_Name)
+ Name=dconnlist.Connector_1_Name)
d_splitter.obj.extend([dloop[0]] + dloop[1])
d_mixer = idf.newidfobject(
"CONNECTOR:MIXER",
- dconnlist.Connector_2_Name)
+ Name=dconnlist.Connector_2_Name)
d_mixer.obj.extend([dloop[-1]] + dloop[1])
# -------- <testing ---------
testn = doingtesting(testing, testn, newplantloop)
@@ -770,7 +769,7 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# -------- <testing ---------
testn = 0
# -------- testing> ---------
- newcondenserloop = idf.newidfobject("CondenserLoop".upper(), loopname)
+ newcondenserloop = idf.newidfobject("CondenserLoop".upper(), Name=loopname)
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
if testn == None:
@@ -805,10 +804,10 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# make the branch lists for this condenser loop
sbranchlist = idf.newidfobject(
"BRANCHLIST",
- newcondenserloop.Condenser_Side_Branch_List_Name)
+ Name=newcondenserloop.Condenser_Side_Branch_List_Name)
dbranchlist = idf.newidfobject(
"BRANCHLIST",
- newcondenserloop.Condenser_Demand_Side_Branch_List_Name)
+ Name=newcondenserloop.Condenser_Demand_Side_Branch_List_Name)
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
if testn == None:
@@ -844,10 +843,10 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing> ---------
# rename inlet outlet of endpoints of loop
anode = "Component_1_Inlet_Node_Name"
- sameinnode = "Condenser_Side_Inlet_Node_Name" # TODO : change ?
+ sameinnode = "Condenser_Side_Inlet_Node_Name" # TODO : change ?
sbranchs[0][anode] = newcondenserloop[sameinnode]
anode = "Component_1_Outlet_Node_Name"
- sameoutnode = "Condenser_Side_Outlet_Node_Name" # TODO : change ?
+ sameoutnode = "Condenser_Side_Outlet_Node_Name" # TODO : change ?
sbranchs[-1][anode] = newcondenserloop[sameoutnode]
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
@@ -855,11 +854,11 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
returnnone()
# -------- testing> ---------
# rename inlet outlet of endpoints of loop - rename in pipe
- pname = sbranchs[0]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = sbranchs[0]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Inlet_Node_Name = newcondenserloop[sameinnode]
- pname = sbranchs[-1]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = sbranchs[-1]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Outlet_Node_Name = newcondenserloop[sameoutnode]
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
@@ -879,10 +878,10 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing> ---------
# rename inlet outlet of endpoints of loop - rename in branch
anode = "Component_1_Inlet_Node_Name"
- sameinnode = "Demand_Side_Inlet_Node_Name" # TODO : change ?
+ sameinnode = "Demand_Side_Inlet_Node_Name" # TODO : change ?
dbranchs[0][anode] = newcondenserloop[sameinnode]
anode = "Component_1_Outlet_Node_Name"
- sameoutnode = "Demand_Side_Outlet_Node_Name" # TODO : change ?
+ sameoutnode = "Demand_Side_Outlet_Node_Name" # TODO : change ?
dbranchs[-1][anode] = newcondenserloop[sameoutnode]
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
@@ -890,11 +889,11 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
returnnone()
# -------- testing> ---------
# rename inlet outlet of endpoints of loop - rename in pipe
- pname = dbranchs[0]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = dbranchs[0]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Inlet_Node_Name = newcondenserloop[sameinnode]
- pname = dbranchs[-1]['Component_1_Name'] # get the pipe name
- apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
+ pname = dbranchs[-1]['Component_1_Name'] # get the pipe name
+ apipe = idf.getobject('Pipe:Adiabatic'.upper(), pname) # get pipe
apipe.Outlet_Node_Name = newcondenserloop[sameoutnode]
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
@@ -907,11 +906,11 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# make the connectorlist an fill fields
sconnlist = idf.newidfobject(
"CONNECTORLIST",
- newcondenserloop.Condenser_Side_Connector_List_Name)
+ Name=newcondenserloop.Condenser_Side_Connector_List_Name)
sconnlist.Connector_1_Object_Type = "Connector:Splitter"
- sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname, )
+ sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname,)
sconnlist.Connector_2_Object_Type = "Connector:Mixer"
- sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname, )
+ sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname,)
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
if testn == None:
@@ -919,11 +918,11 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# -------- testing> ---------
dconnlist = idf.newidfobject(
"CONNECTORLIST",
- newcondenserloop.Condenser_Demand_Side_Connector_List_Name)
+ Name=newcondenserloop.Condenser_Demand_Side_Connector_List_Name)
dconnlist.Connector_1_Object_Type = "Connector:Splitter"
- dconnlist.Connector_1_Name = "%s_demand_splitter" % (loopname, )
+ dconnlist.Connector_1_Name = "%s_demand_splitter" % (loopname,)
dconnlist.Connector_2_Object_Type = "Connector:Mixer"
- dconnlist.Connector_2_Name = "%s_demand_mixer" % (loopname, )
+ dconnlist.Connector_2_Name = "%s_demand_mixer" % (loopname,)
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
if testn == None:
@@ -933,11 +932,11 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# make splitters and mixers
s_splitter = idf.newidfobject(
"CONNECTOR:SPLITTER",
- sconnlist.Connector_1_Name)
+ Name=sconnlist.Connector_1_Name)
s_splitter.obj.extend([sloop[0]] + sloop[1])
s_mixer = idf.newidfobject(
"CONNECTOR:MIXER",
- sconnlist.Connector_2_Name)
+ Name=sconnlist.Connector_2_Name)
s_mixer.obj.extend([sloop[-1]] + sloop[1])
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
@@ -947,11 +946,11 @@ def makecondenserloop(idf, loopname, sloop, dloop, testing=None):
# -
d_splitter = idf.newidfobject(
"CONNECTOR:SPLITTER",
- dconnlist.Connector_1_Name)
+ Name=dconnlist.Connector_1_Name)
d_splitter.obj.extend([dloop[0]] + dloop[1])
d_mixer = idf.newidfobject(
"CONNECTOR:MIXER",
- dconnlist.Connector_2_Name)
+ Name=dconnlist.Connector_2_Name)
d_mixer.obj.extend([dloop[-1]] + dloop[1])
# -------- <testing ---------
testn = doingtesting(testing, testn, newcondenserloop)
@@ -984,7 +983,7 @@ def getmakeidfobject(idf, key, name):
"""get idfobject or make it if it does not exist"""
idfobject = idf.getobject(key, name)
if not idfobject:
- return idf.newidfobject(key, name)
+ return idf.newidfobject(key, Name=name)
else:
return idfobject
@@ -994,7 +993,7 @@ def replacebranch1(idf, loop, branchname, listofcomponents_tuples, fluid=None,
if fluid is None:
fluid = ''
listofcomponents_tuples = _clean_listofcomponents_tuples(listofcomponents_tuples)
- branch = idf.getobject('BRANCH', branchname) # args are (key, name)
+ branch = idf.getobject('BRANCH', branchname) # args are (key, name)
listofcomponents = []
for comp_type, comp_name, compnode in listofcomponents_tuples:
comp = getmakeidfobject(idf, comp_type.upper(), comp_name)
@@ -1069,17 +1068,17 @@ def replacebranch(idf, loop, branch,
supplyconlistname = loop[flnames[3]]
# Plant_Side_Connector_List_Name or Condenser_Side_Connector_List_Name
elif fluid.upper() == 'AIR':
- supplyconlistname = loop[flnames[1]] # Connector_List_Name'
+ supplyconlistname = loop[flnames[1]] # Connector_List_Name'
supplyconlist = idf.getobject('CONNECTORLIST', supplyconlistname)
- for i in range(1, 100000): # large range to hit end
+ for i in range(1, 100000): # large range to hit end
try:
- fieldname = 'Connector_%s_Object_Type' % (i, )
+ fieldname = 'Connector_%s_Object_Type' % (i,)
ctype = supplyconlist[fieldname]
except bunch_subclass.BadEPFieldError:
break
if ctype.strip() == '':
break
- fieldname = 'Connector_%s_Name' % (i, )
+ fieldname = 'Connector_%s_Name' % (i,)
cname = supplyconlist[fieldname]
connector = idf.getobject(ctype.upper(), cname)
if connector.key == 'CONNECTOR:SPLITTER':
@@ -1100,7 +1099,7 @@ def replacebranch(idf, loop, branch,
"Inlet_Node_Name", fluid)
comp[inletnodename] = [
comp[inletnodename],
- loop[flnames[0]]] # Plant_Side_Inlet_Node_Name
+ loop[flnames[0]]] # Plant_Side_Inlet_Node_Name
else:
comp = comps[-1]
outletnodename = getnodefieldname(
@@ -1108,7 +1107,7 @@ def replacebranch(idf, loop, branch,
"Outlet_Node_Name", fluid)
comp[outletnodename] = [
comp[outletnodename],
- loop[flnames[1]]] # .Plant_Side_Outlet_Node_Name
+ loop[flnames[1]]] # .Plant_Side_Outlet_Node_Name
# -------- testing ---------
testn = doingtesting(testing, testn)
if testn == None:
@@ -1116,17 +1115,17 @@ def replacebranch(idf, loop, branch,
# -------- testing ---------
if fluid.upper() == 'WATER':
- demandconlistname = loop[flnames[7]] # .Demand_Side_Connector_List_Name
+ demandconlistname = loop[flnames[7]] # .Demand_Side_Connector_List_Name
demandconlist = idf.getobject('CONNECTORLIST', demandconlistname)
- for i in range(1, 100000): # large range to hit end
+ for i in range(1, 100000): # large range to hit end
try:
- fieldname = 'Connector_%s_Object_Type' % (i, )
+ fieldname = 'Connector_%s_Object_Type' % (i,)
ctype = demandconlist[fieldname]
except bunch_subclass.BadEPFieldError:
break
if ctype.strip() == '':
break
- fieldname = 'Connector_%s_Name' % (i, )
+ fieldname = 'Connector_%s_Name' % (i,)
cname = demandconlist[fieldname]
connector = idf.getobject(ctype.upper(), cname)
if connector.key == 'CONNECTOR:SPLITTER':
@@ -1147,7 +1146,7 @@ def replacebranch(idf, loop, branch,
"Inlet_Node_Name", fluid)
comp[inletnodename] = [
comp[inletnodename],
- loop[flnames[4]]] #.Demand_Side_Inlet_Node_Name
+ loop[flnames[4]]] # .Demand_Side_Inlet_Node_Name
if not isfirst:
comp = comps[-1]
outletnodename = getnodefieldname(
@@ -1155,7 +1154,7 @@ def replacebranch(idf, loop, branch,
"Outlet_Node_Name", fluid)
comp[outletnodename] = [
comp[outletnodename],
- loop[flnames[5]]] # .Demand_Side_Outlet_Node_Name
+ loop[flnames[5]]] # .Demand_Side_Outlet_Node_Name
# -------- testing ---------
testn = doingtesting(testing, testn)
diff --git a/eppy/modeleditor.py b/eppy/modeleditor.py
index d3bebb9..d848a53 100644
--- a/eppy/modeleditor.py
+++ b/eppy/modeleditor.py
@@ -1,4 +1,5 @@
# Copyright (c) 2012 Santosh Philip
+# Copyright (c) 2016 Jamie Bull
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
@@ -15,16 +16,16 @@ import copy
import itertools
import os
import platform
+import warnings
-from eppy.iddcurrent import iddcurrent
-from eppy.idfreader import idfreader1
-from eppy.idfreader import makeabunch
-from py._log import warning
from six import StringIO
from six import iteritems
import eppy.EPlusInterfaceFunctions.iddgroups as iddgroups
import eppy.function_helpers as function_helpers
+from eppy.iddcurrent import iddcurrent
+from eppy.idfreader import idfreader1
+from eppy.idfreader import makeabunch
from eppy.runner.run_functions import run
from eppy.runner.run_functions import wrapped_help_text
@@ -56,7 +57,7 @@ class IDDAlreadySetError(Exception):
def almostequal(first, second, places=7, printit=True):
"""
Test if two values are equal to a given number of places.
- This is based on python's unittest so may be covered by Python's
+ This is based on python's unittest so may be covered by Python's
license.
"""
@@ -500,13 +501,13 @@ def refname2key(idf, refname):
class IDF(object):
"""
- The IDF class holds all the information about an EnergyPlus IDF.
+ The IDF class holds all the information about an EnergyPlus IDF.
Class attributes
---------------
iddname : str
Name of the IDD currently being used by eppy. As a class attribute, this
- is set for all IDFs which are currently being processed and cannot be
+ is set for all IDFs which are currently being processed and cannot be
changed for an individual IDF.
iddinfo : list
Comments and metadata about fields in the IDD.
@@ -577,7 +578,7 @@ class IDF(object):
pass
else:
if testing == False:
- errortxt = "IDD file is set to: %s" % (cls.iddname, )
+ errortxt = "IDD file is set to: %s" % (cls.iddname,)
raise IDDAlreadySetError(errortxt)
@classmethod
@@ -723,11 +724,11 @@ class IDF(object):
key : str
The type of IDF object. This must be in ALL_CAPS.
aname : str, deprecated
- This parameter is not used. It is left there for backward
+ This parameter is not used. It is left there for backward
compatibility.
**kwargs
Keyword arguments in the format `field=value` used to set the value
- of fields in the IDF object when it is created.
+ of fields in the IDF object when it is created.
Returns
-------
@@ -737,7 +738,7 @@ class IDF(object):
obj = newrawobject(self.model, self.idd_info, key)
abunch = obj2bunch(self.model, self.idd_info, obj)
if aname:
- warning.warn("The aname parameter should no longer be used.")
+ warnings.warn("The aname parameter should no longer be used.")
namebunch(abunch, aname)
self.idfobjects[key].append(abunch)
for k, v in list(kwargs.items()):
@@ -895,7 +896,7 @@ class IDF(object):
return astr
def save(self, filename=None, lineendings='default', encoding='latin-1'):
- """
+ """
Save the IDF as a text file with the optional filename passed, or with
the current idfname of the IDF.
@@ -982,7 +983,7 @@ class IDF(object):
"""
self.save(filename, lineendings, encoding)
-
+
@wrapped_help_text(run)
def run(self, **kwargs):
"""
@@ -1001,7 +1002,7 @@ class IDF(object):
run('in.idf', self.epw, **kwargs)
# remove in.idf
os.remove('in.idf')
-
+
def getiddgroupdict(self):
"""Return a idd group dictionary
sample: {'Plant-Condenser Loops': ['PlantLoop', 'CondenserLoop'],
@@ -1009,11 +1010,11 @@ class IDF(object):
['Controller:WaterCoil',
'Controller:OutdoorAir',
'Controller:MechanicalVentilation',
- 'AirLoopHVAC:ControllerList'],
+ 'AirLoopHVAC:ControllerList'],
...}
-
+
Returns
-------
- dict
+ dict
"""
return iddgroups.commdct2grouplist(self.idd_info)
diff --git a/eppy/runner/run_functions.py b/eppy/runner/run_functions.py
index a051330..6fba22c 100644
--- a/eppy/runner/run_functions.py
+++ b/eppy/runner/run_functions.py
@@ -20,7 +20,10 @@ from subprocess import CalledProcessError
from subprocess import check_call
import tempfile
-import multiprocessing as mp
+try:
+ import multiprocessing as mp
+except ImportError:
+ pass
try:
VERSION = os.environ["ENERGYPLUS_INSTALL_VERSION"] # used in CI files
@@ -89,10 +92,14 @@ def runIDFs(jobs_list, processors=1):
idf.saveas(idf_path)
processed_runs.append([[idf_path, epw], kwargs])
- pool = mp.Pool(processors)
- pool.map(multirunner, processed_runs)
- pool.close()
-
+ try:
+ pool = mp.Pool(processors)
+ pool.map(multirunner, processed_runs)
+ pool.close()
+ except NameError:
+ # multiprocessing not present so pass the jobs one at a time
+ for job in processed_runs:
+ multirunner([job])
shutil.rmtree("multi_runs", ignore_errors=True)
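The two files changed above are exactly what the fix needs in order to import under IronPython: `modeleditor.py` drops the `py._log` import in favour of the standard `warnings` module, and `run_functions.py` makes `multiprocessing` optional with a sequential fallback. A minimal, self-contained sketch of those two patterns follows; the `run_one`/`run_all`/`old_style_call` names are illustrative only (not eppy's actual API), and it guards usage with a `None` sentinel rather than the patch's `except NameError`.

```python
# Sketch of the patterns used in the patch above: standard-library warnings for
# deprecation notices, and an optional multiprocessing import with a serial fallback.
import warnings

try:
    import multiprocessing as mp
except ImportError:
    mp = None  # e.g. IronPython, which ships without multiprocessing


def run_one(job):
    """Stand-in for a single simulation run."""
    print("running", job)


def run_all(jobs, processors=1):
    if mp is not None:
        # multiprocessing is available: fan the jobs out over a pool
        pool = mp.Pool(processors)
        pool.map(run_one, jobs)
        pool.close()
    else:
        # multiprocessing not available, so run the jobs one at a time
        for job in jobs:
            run_one(job)


def old_style_call(aname=None):
    # mirrors the modeleditor.py change: warn instead of importing py._log
    if aname:
        warnings.warn("The aname parameter should no longer be used.")


if __name__ == "__main__":
    run_all(["job1", "job2"])
```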
| Issues when importing eppy in Ghpython editor of Grasshopper
This post is cross-posted [here](https://unmethours.com/question/23188/importing-eppy-in-ghpython/)
I was trying to import eppy in the GhPython editor of Grasshopper and got this error:
```
Runtime error (InvalidOperationException): Unsupported param dictionary type: IronPython.Runtime.PythonDictionary
Traceback:
line 44, in initpkg, "C:\Python27\Lib\site-packages\py\_apipkg.py"
line 19, in <module>, "C:\Python27\Lib\site-packages\py\__init__.py"
line 22, in <module>, "C:\Python27\Lib\site-packages\eppy\modeleditor.py"
line 5, in script
```
Line 22 in my **modeleditor.py** is
> from py._log import warning
I tried commenting out lines 22 and 740 in **modeleditor.py**, but then got another error:
```
Runtime error (ImportException): No module named multiprocessing
Traceback:
line 23, in <module>, "C:\Python27\Lib\site-packages\eppy\runner\run_functions.py"
line 28, in <module>, "C:\Python27\Lib\site-packages\eppy\modeleditor.py"
line 5, in script
``` | santoshphilip/eppy | diff --git a/eppy/tests/test_bunch_subclass.py b/eppy/tests/test_bunch_subclass.py
index bcfd7c9..df6faf4 100644
--- a/eppy/tests/test_bunch_subclass.py
+++ b/eppy/tests/test_bunch_subclass.py
@@ -11,21 +11,20 @@ from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
-# This test is ugly because I have to send file names and not able to send file handles
-
import pytest
from six import StringIO
-
from eppy.EPlusInterfaceFunctions import readidf
-import eppy.bunchhelpers as bunchhelpers
import eppy.bunch_subclass as bunch_subclass
+import eppy.bunchhelpers as bunchhelpers
+from eppy.iddcurrent import iddcurrent
+import eppy.idfreader as idfreader
from eppy.modeleditor import IDF
+# This test is ugly because I have to send file names and not able to send file handles
EpBunch = bunch_subclass.EpBunch
-from eppy.iddcurrent import iddcurrent
iddtxt = iddcurrent.iddtxt
# idd is read only once in this test
@@ -388,7 +387,7 @@ def test_EpBunch():
iddfile = StringIO(iddtxt)
fname = StringIO(idftxt)
- block, data, commdct, idd_index = readidf.readdatacommdct1(fname,
+ block, data, commdct, idd_index = readidf.readdatacommdct1(fname,
iddfile=iddfile)
# setup code walls - can be generic for any object
@@ -516,10 +515,10 @@ def test_EpBunch():
def test_extendlist():
"""py.test for extendlist"""
data = (
- ([1, 2, 3], 2, 0, [1, 2, 3]), # lst, i, value, nlst
- ([1, 2, 3], 3, 0, [1, 2, 3, 0]), # lst, i, value, nlst
- ([1, 2, 3], 5, 0, [1, 2, 3, 0, 0, 0]), # lst, i, value, nlst
- ([1, 2, 3], 7, 0, [1, 2, 3, 0, 0, 0, 0, 0]), # lst, i, value, nlst
+ ([1, 2, 3], 2, 0, [1, 2, 3]), # lst, i, value, nlst
+ ([1, 2, 3], 3, 0, [1, 2, 3, 0]), # lst, i, value, nlst
+ ([1, 2, 3], 5, 0, [1, 2, 3, 0, 0, 0]), # lst, i, value, nlst
+ ([1, 2, 3], 7, 0, [1, 2, 3, 0, 0, 0, 0, 0]), # lst, i, value, nlst
)
for lst, i, value, nlst in data:
bunch_subclass.extendlist(lst, i, value=value)
@@ -529,7 +528,7 @@ class TestEpBunch(object):
"""
py.test for EpBunch.getrange, EpBunch.checkrange, EpBunch.fieldnames,
EpBunch.fieldvalues, EpBunch.getidd.
-
+
"""
def initdata(self):
obj, objls, objidd = (
@@ -542,7 +541,7 @@ class TestEpBunch(object):
0.4,
'FullExterior',
25,
- 6], #obj
+ 6], # obj
[
'key',
@@ -592,29 +591,29 @@ class TestEpBunch(object):
'type': ['real']},
])
return obj, objls, objidd
-
+
def test_fieldnames(self):
"""
Test that the contents of idfobject.fieldnames are the same as those
of objls.
-
+
"""
obj, objls, objidd = self.initdata()
idfobject = EpBunch(obj, objls, objidd)
for fn_item, objls_item in zip(idfobject.fieldnames, idfobject.objls):
assert fn_item == objls_item
-
+
def test_fieldvalues(self):
"""
Test that the contents of idfobject.fieldvalues are the same as those
of obj.
-
+
"""
obj, objls, objidd = self.initdata()
idfobject = EpBunch(obj, objls, objidd)
for fv_item, objls_item in zip(idfobject.fieldvalues, idfobject.obj):
assert fv_item == objls_item
-
+
def test_getrange(self):
data = (
@@ -622,12 +621,12 @@ class TestEpBunch(object):
"Loads_Convergence_Tolerance_Value",
{
'maximum': .5, 'minimum>': 0.0, 'maximum<':None,
- 'minimum':None, 'type': 'real'},), # fieldname, theranges
+ 'minimum':None, 'type': 'real'},), # fieldname, theranges
(
"Maximum_Number_of_Warmup_Days",
{
- 'maximum': None, 'minimum>': -3, 'maximum<':5,
- 'minimum':None, 'type': 'integer'},), # fieldname, theranges
+ 'maximum': None, 'minimum>':-3, 'maximum<':5,
+ 'minimum':None, 'type': 'integer'},), # fieldname, theranges
)
obj, objls, objidd = self.initdata()
idfobject = EpBunch(obj, objls, objidd)
@@ -647,10 +646,10 @@ class TestEpBunch(object):
5, False, None),
# fieldname, fieldvalue, isexception, theexception
("Minimum_Number_of_Warmup_Days",
- -3, False, None),
+ - 3, False, None),
# fieldname, fieldvalue, isexception, theexception
("Minimum_Number_of_Warmup_Days",
- -4, True, bunch_subclass.RangeError),
+ - 4, True, bunch_subclass.RangeError),
# fieldname, fieldvalue, isexception, theexception
# -
("Maximum_Number_of_Warmup_Days",
@@ -660,7 +659,7 @@ class TestEpBunch(object):
5, True, bunch_subclass.RangeError),
# fieldname, fieldvalue, isexception, theexception
("Maximum_Number_of_Warmup_Days",
- -3, True, bunch_subclass.RangeError),
+ - 3, True, bunch_subclass.RangeError),
# fieldname, fieldvalue, isexception, theexception
("Loads_Convergence_Tolerance_Value",
0.3, False, bunch_subclass.RangeError),
@@ -698,7 +697,7 @@ class TestEpBunch(object):
assert result == {'type': ['real']}
result = idfobject.getfieldidd('No_such_field')
assert result == {}
-
+
def test_getfieldidd_item(self):
"""py.test for test_getfieldidd_item"""
obj, objls, objidd = self.initdata()
@@ -709,7 +708,7 @@ class TestEpBunch(object):
assert result == []
result = idfobject.getfieldidd_item('no_such_field', 'type')
assert result == []
-
+
def test_get_retaincase(self):
"""py.test for get_retaincase"""
obj, objls, objidd = self.initdata()
@@ -718,7 +717,7 @@ class TestEpBunch(object):
assert result == True
result = idfobject.get_retaincase('Terrain')
assert result == False
-
+
def test_isequal(self):
"""py.test for isequal"""
obj, objls, objidd = self.initdata()
@@ -752,7 +751,7 @@ class TestEpBunch(object):
assert result == True
result = idfobject.isequal('Maximum_Number_of_Warmup_Days', 25.00001)
assert result == False
-
+
def test_getreferingobjs(self):
"""py.test for getreferingobjs"""
thedata = ((
@@ -779,15 +778,15 @@ class TestEpBunch(object):
5.000000000000, !- Vertex 1 X-coordinate {m}
6.000000000000, !- Vertex 1 Y-coordinate {m}
3.000000000000; !- Vertex 1 Z-coordinate {m}
-
- WALL:EXTERIOR,
+
+ WALL:EXTERIOR,
WallExterior, !- Name
, !- Construction Name
Box, !- Zone Name
, !- Azimuth Angle
90; !- Tilt Angle
- BUILDINGSURFACE:DETAILED,
+ BUILDINGSURFACE:DETAILED,
EWall, !- Name
, !- Surface Type
, !- Construction Name
@@ -799,7 +798,7 @@ class TestEpBunch(object):
autocalculate, !- View Factor to Ground
autocalculate; !- Number of Vertices
- BUILDINGSURFACE:DETAILED,
+ BUILDINGSURFACE:DETAILED,
EWall1, !- Name
, !- Surface Type
, !- Construction Name
@@ -830,7 +829,7 @@ class TestEpBunch(object):
autocalculate; !- Number of Vertices
""",
'Box',
- ['N_Wall', 'EWall', 'WallExterior']), # idftxt, zname, surfnamelst
+ ['N_Wall', 'EWall', 'WallExterior']), # idftxt, zname, surfnamelst
)
for idftxt, zname, surfnamelst in thedata:
# import pdb; pdb.set_trace()
@@ -854,7 +853,7 @@ class TestEpBunch(object):
for idftxt, zname, surfnamelst in thedata:
idf = IDF(StringIO(idftxt))
zone = idf.getobject('zone'.upper(), zname)
- kwargs = {'fields':[u'Zone_Name', ],}
+ kwargs = {'fields':[u'Zone_Name', ], }
result = zone.getreferingobjs(**kwargs)
rnames = [item.Name for item in result]
rnames.sort()
@@ -883,41 +882,41 @@ class TestEpBunch(object):
rnames.sort()
surfnamelst.sort()
assert rnames == windownamelist
-
+
def test_get_referenced_object(self):
"""py.test for get_referenced_object"""
idf = IDF()
idf.initnew('test.idf')
- idf.newidfobject('VERSION') # does not have a field "Name"
+ idf.newidfobject('VERSION') # does not have a field "Name"
- # construction material
- construction = idf.newidfobject('CONSTRUCTION', 'construction')
+ # construction material
+ construction = idf.newidfobject('CONSTRUCTION', Name='construction')
construction.Outside_Layer = 'TestMaterial'
-
- expected = idf.newidfobject('MATERIAL', 'TestMaterial')
-
+
+ expected = idf.newidfobject('MATERIAL', Name='TestMaterial')
+
fetched = idf.getobject('MATERIAL', 'TestMaterial')
assert fetched == expected
-
+
material = construction.get_referenced_object('Outside_Layer')
assert material == expected
-
+
# window material
glazing_group = idf.newidfobject(
- 'WINDOWMATERIAL:GLAZINGGROUP:THERMOCHROMIC', 'glazing_group')
+ 'WINDOWMATERIAL:GLAZINGGROUP:THERMOCHROMIC', Name='glazing_group')
glazing_group.Window_Material_Glazing_Name_1 = 'TestWindowMaterial'
expected = idf.newidfobject(
- 'WINDOWMATERIAL:GLAZING', 'TestWindowMaterial') # has several \references
-
+ 'WINDOWMATERIAL:GLAZING', Name='TestWindowMaterial') # has several \references
+
fetched = idf.getobject('WINDOWMATERIAL:GLAZING', 'TestWindowMaterial')
assert fetched == expected
-
+
material = glazing_group.get_referenced_object(
'Window_Material_Glazing_Name_1')
assert material == expected
-
-
+
+
bldfidf = """
Version,
6.0;
@@ -950,13 +949,12 @@ BuildingSurface:Detailed,
"""
# test_EpBunch1()
# import idfreader
-import eppy.idfreader as idfreader
def test_EpBunch1():
"""py.test for EpBunch1"""
iddfile = StringIO(iddtxt)
idffile = StringIO(bldfidf)
- block, data, commdct, idd_index = readidf.readdatacommdct1(idffile,
+ block, data, commdct, idd_index = readidf.readdatacommdct1(idffile,
iddfile=iddfile)
key = "BUILDING"
objs = data.dt[key]
@@ -970,7 +968,7 @@ def test_EpBunch1():
assert bunchobj.Name == "Kutub Minar"
prnt = bunchobj.__repr__()
result = """
-BUILDING,
+BUILDING,
Kutub Minar, !- Name
30.0, !- North Axis
City, !- Terrain
diff --git a/eppy/tests/test_hvacbuilder.py b/eppy/tests/test_hvacbuilder.py
index f4c71d8..82fa39f 100644
--- a/eppy/tests/test_hvacbuilder.py
+++ b/eppy/tests/test_hvacbuilder.py
@@ -11,13 +11,15 @@ from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
+from six import StringIO
+
import eppy.hvacbuilder as hvacbuilder
+from eppy.iddcurrent import iddcurrent
from eppy.modeleditor import IDF
-from six import StringIO
+
# idd is read only once in this test
# if it has already been read from some other test, it will continue with the old reading
-from eppy.iddcurrent import iddcurrent
iddfhandle = StringIO(iddcurrent.iddtxt)
if IDF.getiddname() == None:
IDF.setiddname(iddfhandle)
@@ -26,11 +28,11 @@ if IDF.getiddname() == None:
def test_flattencopy():
"""py.test for flattencopy"""
tdata = (
- ([1, 2], [1, 2]), #lst , nlst
- ([1, 2, [3, 4]], [1, 2, 3, 4]), #lst , nlst
- ([1, 2, [3, [4, 5, 6], 7, 8]], [1, 2, 3, 4, 5, 6, 7, 8]), #lst , nlst
+ ([1, 2], [1, 2]), # lst , nlst
+ ([1, 2, [3, 4]], [1, 2, 3, 4]), # lst , nlst
+ ([1, 2, [3, [4, 5, 6], 7, 8]], [1, 2, 3, 4, 5, 6, 7, 8]), # lst , nlst
([1, 2, [3, [4, 5, [6, 7], 8], 9]], [1, 2, 3, 4, 5, 6, 7, 8, 9]),
- #lst , nlst
+ # lst , nlst
)
for lst, nlst in tdata:
result = hvacbuilder.flattencopy(lst)
@@ -82,7 +84,7 @@ def test_makeplantloop():
p_loop Supply Connectors, p_loop Demand Inlet, p_loop Demand Outlet,
p_loop Demand Branchs, p_loop Demand Connectors, Sequential, ,
SingleSetpoint, None, None;"""
- ), # blankidf, loopname, sloop, dloop, nidf
+ ), # blankidf, loopname, sloop, dloop, nidf
)
for blankidf, loopname, sloop, dloop, nidf in tdata:
fhandle = StringIO("")
@@ -142,7 +144,7 @@ def test_makecondenserloop():
c_loop Cond_Supply Connectors, c_loop Demand Inlet,
c_loop Demand Outlet, c_loop Condenser Demand Branchs,
c_loop Condenser Demand Connectors, Sequential, None; """
- ), # blankidf, loopname, sloop, dloop, nidf
+ ), # blankidf, loopname, sloop, dloop, nidf
)
for blankidf, loopname, sloop, dloop, nidf in tdata:
@@ -178,7 +180,7 @@ def test_getbranchcomponents():
[
('PIPE:ADIABATIC', 'np1'),
('PIPE:ADIABATIC', 'np2')
- ]), # idftxt, utest, componentlist
+ ]), # idftxt, utest, componentlist
(
"""BRANCH,
sb1,
@@ -260,7 +262,7 @@ def test_renamenodes():
fhandle = StringIO(idftxt)
idf = IDF(fhandle)
pipe = idf.idfobjects['PIPE:ADIABATIC'][0]
- pipe.Outlet_Node_Name = ['np1_outlet', 'np1_np2_node'] # this is the first step of the replace
+ pipe.Outlet_Node_Name = ['np1_outlet', 'np1_np2_node'] # this is the first step of the replace
hvacbuilder.renamenodes(idf, fieldtype='node')
outidf = IDF(StringIO(outtxt))
result = idf.idfobjects['PIPE:ADIABATIC'][0].obj
@@ -276,17 +278,17 @@ def test_getfieldnamesendswith():
"""
tdata = (
("Inlet_Node_Name", ["Inlet_Node_Name"]
- ), # endswith, fieldnames
+ ), # endswith, fieldnames
(
"Node_Name",
["Inlet_Node_Name",
- "Outlet_Node_Name"]), # endswith, fieldnames
+ "Outlet_Node_Name"]), # endswith, fieldnames
(
"Name",
[
"Name",
"Inlet_Node_Name",
- "Outlet_Node_Name"]), # endswith, fieldnames
+ "Outlet_Node_Name"]), # endswith, fieldnames
)
fhandle = StringIO(idftxt)
idf = IDF(fhandle)
@@ -316,7 +318,7 @@ def test_getnodefieldname():
for objtype, objname, endswith, fluid, nodefieldname in tdata:
fhandle = StringIO("")
idf = IDF(fhandle)
- idfobject = idf.newidfobject(objtype, objname)
+ idfobject = idf.newidfobject(objtype, Name=objname)
result = hvacbuilder.getnodefieldname(idfobject, endswith, fluid)
assert result == nodefieldname
@@ -327,8 +329,8 @@ def test_connectcomponents():
tdata = (
(
- [(idf.newidfobject("PIPE:ADIABATIC", "pipe1"), None),
- (idf.newidfobject("PIPE:ADIABATIC", "pipe2"), None)],
+ [(idf.newidfobject("PIPE:ADIABATIC", Name="pipe1"), None),
+ (idf.newidfobject("PIPE:ADIABATIC", Name="pipe2"), None)],
["pipe1_Inlet_Node_Name", ["pipe2_Inlet_Node_Name",
"pipe1_pipe2_node"]],
[["pipe1_Outlet_Node_Name", "pipe1_pipe2_node"],
@@ -336,9 +338,9 @@ def test_connectcomponents():
),
# components_thisnodes, inlets, outlets, fluid
(
- [(idf.newidfobject("Coil:Cooling:Water".upper(), "pipe1"),
+ [(idf.newidfobject("Coil:Cooling:Water".upper(), Name="pipe1"),
'Water_'),
- (idf.newidfobject("Coil:Cooling:Water".upper(), "pipe2"),
+ (idf.newidfobject("Coil:Cooling:Water".upper(), Name="pipe2"),
'Water_')],
['pipe1_Water_Inlet_Node_Name', '',
'pipe2_Water_Inlet_Node_Name',
@@ -349,8 +351,8 @@ def test_connectcomponents():
),
# components_thisnodes, inlets, outlets, fluid
(
- [(idf.newidfobject("PIPE:ADIABATIC".upper(), "pipe1"), None),
- (idf.newidfobject("Coil:Cooling:Water".upper(), "pipe2"),
+ [(idf.newidfobject("PIPE:ADIABATIC".upper(), Name="pipe1"), None),
+ (idf.newidfobject("Coil:Cooling:Water".upper(), Name="pipe2"),
'Water_')],
["pipe1_Inlet_Node_Name", "pipe2_Water_Inlet_Node_Name",
['pipe2_Air_Inlet_Node_Name', 'pipe1_pipe2_node']],
@@ -415,7 +417,7 @@ def test_initinletoutlet():
fhandle = StringIO("")
idf = IDF(fhandle)
for idfobjectkey, idfobjname, thisnode, force, inlets, outlets in tdata:
- idfobject = idf.newidfobject(idfobjectkey, idfobjname)
+ idfobject = idf.newidfobject(idfobjectkey, Name=idfobjname)
inodefields = hvacbuilder.getfieldnamesendswith(
idfobject,
"Inlet_Node_Name")
@@ -487,7 +489,7 @@ def test_componentsintobranch():
for ii, (idftxt, complst, fluid, branchcomps) in enumerate(tdata):
fhandle = StringIO(idftxt)
idf = IDF(fhandle)
- components_thisnodes = [(idf.newidfobject(key, nm), thisnode)
+ components_thisnodes = [(idf.newidfobject(key, Name=nm), thisnode)
for key, nm, thisnode in complst]
fnc = hvacbuilder.initinletoutlet
components_thisnodes = [(fnc(idf, cp, thisnode), thisnode)
@@ -519,14 +521,14 @@ def test_replacebranch():
'PIPE:ADIABATIC',
'np2', 'np1_np2_node', 'np2_Outlet_Node_Name', ''
]
- ), # loopname, sloop, dloop, branchname, componenttuple, fluid, outbranch
+ ), # loopname, sloop, dloop, branchname, componenttuple, fluid, outbranch
)
for (loopname, sloop, dloop, branchname,
componenttuple, fluid, outbranch) in tdata:
fhandle = StringIO("")
idf = IDF(fhandle)
loop = hvacbuilder.makeplantloop(idf, loopname, sloop, dloop)
- components_thisnodes = [(idf.newidfobject(key, nm), thisnode)
+ components_thisnodes = [(idf.newidfobject(key, Name=nm), thisnode)
for key, nm, thisnode in componenttuple]
branch = idf.getobject('BRANCH', branchname)
newbr = hvacbuilder.replacebranch(idf, loop, branch,
@@ -539,11 +541,11 @@ def test_makepipecomponent():
(
"apipe",
['PIPE:ADIABATIC', 'apipe',
- 'apipe_inlet', 'apipe_outlet']), # pname, pipe_obj
+ 'apipe_inlet', 'apipe_outlet']), # pname, pipe_obj
(
"bpipe",
['PIPE:ADIABATIC', 'bpipe',
- 'bpipe_inlet', 'bpipe_outlet']), # pname, pipe_obj
+ 'bpipe_inlet', 'bpipe_outlet']), # pname, pipe_obj
)
for pname, pipe_obj in tdata:
fhandle = StringIO("")
@@ -556,7 +558,7 @@ def test_makeductcomponent():
tdata = ((
'aduct',
['DUCT', 'aduct', 'aduct_inlet', 'aduct_outlet']
- ), # dname, duct_obj
+ ), # dname, duct_obj
)
for dname, duct_obj in tdata:
fhandle = StringIO("")
@@ -582,7 +584,7 @@ def test_makepipebranch():
'p_branch_pipe',
'p_branch_pipe_inlet',
'p_branch_pipe_outlet']
- ), # pb_name, branch_obj, pipe_obj
+ ), # pb_name, branch_obj, pipe_obj
)
for pb_name, branch_obj, pipe_obj in tdata:
fhandle = StringIO("")
@@ -610,7 +612,7 @@ def test_makeductbranch():
'DUCT',
'd_branch_duct',
'd_branch_duct_inlet',
- 'd_branch_duct_outlet']), # db_name, branch_obj, duct_obj
+ 'd_branch_duct_outlet']), # db_name, branch_obj, duct_obj
)
for db_name, branch_obj, duct_obj in tdata:
fhandle = StringIO("")
@@ -620,39 +622,25 @@ def test_makeductbranch():
theduct = idf.getobject('DUCT', result.Component_1_Name)
assert theduct.obj == duct_obj
-def test_flattencopy():
- """py.test for flattencopy"""
- tdata = (([1, 2], [1, 2]), #lst , nlst -a
- ([1, 2, [3, 4]], [1, 2, 3, 4]), #lst , nlst
- ([1, 2, [3, [4, 5, 6], 7, 8]], [1, 2, 3, 4, 5, 6, 7, 8]),
- #lst , nlst
- ([1, 2, [3, [4, 5, [6, 7], 8], 9]], [1, 2, 3, 4, 5, 6, 7, 8, 9]),
- #lst , nlst
- )
- for lst, nlst in tdata:
- result = hvacbuilder.flattencopy(lst)
- assert result == nlst
-
-def test__clean_listofcomponents():
+def test_clean_listofcomponents():
"""py.test for _clean_listofcomponents"""
data = (
- ([1, 2], [(1, None), (2, None)]), # lst, clst
- ([(1, None), 2], [(1, None), (2, None)]), # lst, clst
- ([(1, 'stuff'), 2], [(1, 'stuff'), (2, None)]), # lst, clst
+ ([1, 2], [(1, None), (2, None)]), # lst, clst
+ ([(1, None), 2], [(1, None), (2, None)]), # lst, clst
+ ([(1, 'stuff'), 2], [(1, 'stuff'), (2, None)]), # lst, clst
)
for lst, clst in data:
result = hvacbuilder._clean_listofcomponents(lst)
assert result == clst
-def test__clean_listofcomponents_tuples():
+def test_clean_listofcomponents_tuples():
"""py.test for _clean_listofcomponents_tuples"""
data = (
- ([(1, 2), (2, 3)], [(1, 2, None), (2, 3, None)]), #lst, clst
- ([(1, 2, None), (2, 3)], [(1, 2, None), (2, 3, None)]), #lst, clst
- ([(1, 2, 'stuff'), (2, 3)], [(1, 2, 'stuff'), (2, 3, None)]), #lst, clst
+ ([(1, 2), (2, 3)], [(1, 2, None), (2, 3, None)]), # lst, clst
+ ([(1, 2, None), (2, 3)], [(1, 2, None), (2, 3, None)]), # lst, clst
+ ([(1, 2, 'stuff'), (2, 3)], [(1, 2, 'stuff'), (2, 3, None)]), # lst, clst
)
for lst, clst in data:
result = hvacbuilder._clean_listofcomponents_tuples(lst)
assert result == clst
-
\ No newline at end of file
diff --git a/eppy/tests/test_modeleditor.py b/eppy/tests/test_modeleditor.py
index 469669c..8dabb19 100644
--- a/eppy/tests/test_modeleditor.py
+++ b/eppy/tests/test_modeleditor.py
@@ -5,25 +5,28 @@
# http://opensource.org/licenses/MIT)
# =======================================================================
"""py.test for modeleditor"""
+
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
-from six import StringIO
-from six import string_types
-from eppy import modeleditor
-from eppy.iddcurrent import iddcurrent
-from eppy.modeleditor import IDF
-from eppy.pytest_helpers import almostequal
from itertools import product
import os
+import warnings
import pytest
+from six import StringIO
+from six import string_types
+from eppy import modeleditor
+from eppy.bunch_subclass import Bunch
+from eppy.iddcurrent import iddcurrent
import eppy.idfreader as idfreader
+from eppy.modeleditor import IDF
+from eppy.pytest_helpers import almostequal
import eppy.snippet as snippet
-from eppy.bunch_subclass import Bunch
+
iddsnippet = iddcurrent.iddtxt
idfsnippet = snippet.idfsnippet
@@ -39,7 +42,7 @@ iddfhandle = StringIO(iddcurrent.iddtxt)
if IDF.getiddname() == None:
IDF.setiddname(iddfhandle)
-
+
def test_poptrailing():
"""py.test for poptrailing"""
tdata = (
@@ -340,6 +343,30 @@ def test_newidfobject():
assert obj.fieldvalues[1] == 'A Wall'
+def test_newidfobject_warning():
+ """Test that the warning for newidfobject created with `aname` is working.
+
+ Fails if the warning is not issued when `aname` is used, or if the warning
+ is issued when `aname` is not used.
+ """
+ # make a blank idf
+ # make a function for this and then continue.
+ idf = IDF()
+ idf.new()
+ objtype = 'material:airgap'.upper()
+ # expect warnings here
+ pytest.warns(UserWarning, idf.newidfobject, objtype, aname="Krypton")
+ pytest.warns(UserWarning, idf.newidfobject, objtype, "Krypton")
+ # expect no warnings here
+ # This works because pytest.warn raises an exception if no warning is
+ # produced. We expect this and catch it with pytest.raises.
+ pytest.raises(
+ Exception,
+ pytest.warns, UserWarning,
+ idf.newidfobject,
+ objtype, Name="Krypton")
+
+
def test_save():
"""
Test the IDF.save() function using a filehandle to avoid external effects.
@@ -358,7 +385,7 @@ def test_save():
def test_save_with_lineendings_and_encodings():
"""
- Test the IDF.save() function with combinations of encodings and line
+ Test the IDF.save() function with combinations of encodings and line
endings.
"""
@@ -442,7 +469,7 @@ def test_initread():
idf = IDF()
idf.initread(fname)
assert idf.getobject('BUILDING', 'Building')
-
+
# test fname as str
fname = str('tmp.idf')
assert isinstance(fname, string_types)
@@ -474,7 +501,7 @@ def test_initreadtxt():
0.16, !- Conductivity {W/m-K}
800, !- Density {kg/m3}
1090; !- Specific Heat {J/kg-K}
-
+
Construction,
Interior Wall, !- Name
G01a 19mm gypsum board, !- Outside Layer
@@ -542,8 +569,8 @@ def test_refname2key():
), # refname, key
(
'AllCurves',
- [u'PUMP:VARIABLESPEED',
- u'PUMP:CONSTANTSPEED', u'BOILER:HOTWATER',
+ [u'PUMP:VARIABLESPEED',
+ u'PUMP:CONSTANTSPEED', u'BOILER:HOTWATER',
u'ENERGYMANAGEMENTSYSTEM:CURVEORTABLEINDEXVARIABLE'],
), # refname, key
)
@@ -559,7 +586,7 @@ def test_getiddgroupdict():
{
None: ['Lead Input', 'Simulation Data']
},
- ), # gdict,
+ ), # gdict,
)
for gdict, in data:
fhandle = StringIO("")
@@ -584,8 +611,8 @@ def test_idfinmsequence():
material = materials.pop(0)
assert material.theidf == None
assert materials[0].theidf == idf
-
-
+
+
def test_idd_index():
"""py.test to see if idd_index is returned"""
idftxt = """"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 4
} | 0.5 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | beautifulsoup4==4.13.3
decorator==5.2.1
-e git+https://github.com/santoshphilip/eppy.git@0c4d92cdb82974cd9fedacc6d1273b7a4deb03fe#egg=eppy
exceptiongroup==1.2.2
iniconfig==2.1.0
munch==4.0.0
packaging==24.2
pluggy==1.5.0
pydot3k==1.0.17
pyparsing==3.2.3
pytest==8.3.5
six==1.17.0
soupsieve==2.6
tinynumpy==1.2.1
tomli==2.2.1
typing_extensions==4.13.0
| name: eppy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.13.3
- decorator==5.2.1
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- munch==4.0.0
- packaging==24.2
- pluggy==1.5.0
- pydot3k==1.0.17
- pyparsing==3.2.3
- pytest==8.3.5
- six==1.17.0
- soupsieve==2.6
- tinynumpy==1.2.1
- tomli==2.2.1
- typing-extensions==4.13.0
prefix: /opt/conda/envs/eppy
| [
"eppy/tests/test_bunch_subclass.py::test_EpBunch",
"eppy/tests/test_bunch_subclass.py::test_extendlist",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_fieldnames",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_fieldvalues",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_getrange",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_checkrange",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_getfieldidd",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_getfieldidd_item",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_get_retaincase",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_isequal",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_getreferingobjs",
"eppy/tests/test_bunch_subclass.py::TestEpBunch::test_get_referenced_object",
"eppy/tests/test_bunch_subclass.py::test_EpBunch1",
"eppy/tests/test_hvacbuilder.py::test_flattencopy",
"eppy/tests/test_hvacbuilder.py::test_makeplantloop",
"eppy/tests/test_hvacbuilder.py::test_makecondenserloop",
"eppy/tests/test_hvacbuilder.py::test_getbranchcomponents",
"eppy/tests/test_hvacbuilder.py::test_renamenodes",
"eppy/tests/test_hvacbuilder.py::test_getfieldnamesendswith",
"eppy/tests/test_hvacbuilder.py::test_getnodefieldname",
"eppy/tests/test_hvacbuilder.py::test_connectcomponents",
"eppy/tests/test_hvacbuilder.py::test_initinletoutlet",
"eppy/tests/test_hvacbuilder.py::test_componentsintobranch",
"eppy/tests/test_hvacbuilder.py::test_replacebranch",
"eppy/tests/test_hvacbuilder.py::test_makepipecomponent",
"eppy/tests/test_hvacbuilder.py::test_makeductcomponent",
"eppy/tests/test_hvacbuilder.py::test_makepipebranch",
"eppy/tests/test_hvacbuilder.py::test_makeductbranch",
"eppy/tests/test_hvacbuilder.py::test_clean_listofcomponents",
"eppy/tests/test_hvacbuilder.py::test_clean_listofcomponents_tuples",
"eppy/tests/test_modeleditor.py::test_poptrailing",
"eppy/tests/test_modeleditor.py::test_extendlist",
"eppy/tests/test_modeleditor.py::test_namebunch",
"eppy/tests/test_modeleditor.py::test_getnamedargs",
"eppy/tests/test_modeleditor.py::test_getrefnames",
"eppy/tests/test_modeleditor.py::test_getallobjlists",
"eppy/tests/test_modeleditor.py::test_rename",
"eppy/tests/test_modeleditor.py::test_zonearea_zonevolume",
"eppy/tests/test_modeleditor.py::test_new",
"eppy/tests/test_modeleditor.py::test_newidfobject",
"eppy/tests/test_modeleditor.py::test_save",
"eppy/tests/test_modeleditor.py::test_save_with_lineendings_and_encodings",
"eppy/tests/test_modeleditor.py::test_saveas",
"eppy/tests/test_modeleditor.py::test_savecopy",
"eppy/tests/test_modeleditor.py::test_initread",
"eppy/tests/test_modeleditor.py::test_initreadtxt",
"eppy/tests/test_modeleditor.py::test_idfstr",
"eppy/tests/test_modeleditor.py::test_refname2key",
"eppy/tests/test_modeleditor.py::test_getiddgroupdict",
"eppy/tests/test_modeleditor.py::test_idfinmsequence",
"eppy/tests/test_modeleditor.py::test_idd_index"
]
| [
"eppy/tests/test_modeleditor.py::test_newidfobject_warning"
]
| []
| []
| MIT License | 1,082 | [
"eppy/runner/run_functions.py",
"eppy/modeleditor.py",
"eppy/bunch_subclass.py",
"eppy/hvacbuilder.py"
]
| [
"eppy/runner/run_functions.py",
"eppy/modeleditor.py",
"eppy/bunch_subclass.py",
"eppy/hvacbuilder.py"
]
|
|
streamlink__streamlink-696 | 192571b8ab4e4ebc1c7391257825e5347fcb2645 | 2017-03-11 23:37:55 | 4b09107829230265fbb5e54a28fccbf1332d0cc4 | diff --git a/docs/install.rst b/docs/install.rst
index 3a3362a0..fc30d34a 100644
--- a/docs/install.rst
+++ b/docs/install.rst
@@ -9,9 +9,9 @@ Linux and BSD packages
==================================== ===========================================
Distribution Installing
==================================== ===========================================
-`Arch Linux (aur)`_ .. code-block:: console
+`Arch Linux`_ .. code-block:: console
- # pacaur -S streamlink
+ # pacman -S streamlink
`Arch Linux (aur, git)`_ .. code-block:: console
@@ -42,7 +42,7 @@ Distribution Installing
# xbps-install streamlink
==================================== ===========================================
-.. _Arch Linux (aur): https://aur.archlinux.org/packages/streamlink/
+.. _Arch Linux: https://www.archlinux.org/packages/community/any/streamlink/
.. _Arch Linux (aur, git): https://aur.archlinux.org/packages/streamlink-git/
.. _Fedora: https://apps.fedoraproject.org/packages/python-streamlink
.. _Gentoo Linux: https://packages.gentoo.org/package/net-misc/streamlink
@@ -78,7 +78,8 @@ Package maintainers
==================================== ===========================================
Distribution/Platform Maintainer
==================================== ===========================================
-Arch Josip Ponjavic <josipponjavic at gmail.com>
+Arch Giancarlo Razzolini <grazzolini at archlinux.org>
+Arch (aur, git) Josip Ponjavic <josipponjavic at gmail.com>
Chocolatey Scott Walters <me at scowalt.com>
Fedora Mohamed El Morabity <melmorabity at fedoraproject.org>
Gentoo soredake <fdsfgs at krutt.org>
@@ -168,6 +169,17 @@ Name Notes
audio and video streams, eg. YouTube 1080p+
==================================== ===========================================
+Using pycrypto and pycountry
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+With these two environment variables it is possible to use `pycrypto`_ instead of
+`pycryptodome`_ and `pycountry`_ instead of `iso-639`_ and `iso3166`_.
+
+.. code-block:: console
+
+ $ export STREAMLINK_USE_PYCRYPTO="true"
+ $ export STREAMLINK_USE_PYCOUNTRY="true"
+
.. _Python: http://python.org/
.. _python-setuptools: http://pypi.python.org/pypi/setuptools
.. _python-argparse: http://pypi.python.org/pypi/argparse
@@ -175,6 +187,8 @@ Name Notes
.. _python-requests: http://python-requests.org/
.. _python-singledispatch: http://pypi.python.org/pypi/singledispatch
.. _RTMPDump: http://rtmpdump.mplayerhq.hu/
+.. _pycountry: https://pypi.python.org/pypi/pycountry
+.. _pycrypto: https://www.dlitz.net/software/pycrypto/
.. _pycryptodome: https://pycryptodome.readthedocs.io/en/latest/
.. _python-librtmp: https://github.com/chrippa/python-librtmp
.. _ffmpeg: https://www.ffmpeg.org/
diff --git a/docs/plugin_matrix.rst b/docs/plugin_matrix.rst
index e3e4dfe2..bce36dbc 100644
--- a/docs/plugin_matrix.rst
+++ b/docs/plugin_matrix.rst
@@ -33,12 +33,17 @@ beattv be-at.tv Yes Yes Playlist not implemented ye
bigo - live.bigo.tv Yes --
- bigoweb.co
bilibili live.bilibili.com Yes ?
-bliptv blip.tv -- Yes
bongacams bongacams.com Yes No Only RTMP streams are available.
btv btv.bg Yes No Requires login, and geo-restricted to Bulgaria.
+camsoda camsoda.com Yes No
canalplus - canalplus.fr Yes Yes Streams may be geo-restricted to France.
- c8.fr
- cstar.fr
+canlitv - canlitv.com Yes --
+ - canlitv.life
+ - canlitvlive.co
+ - canlitvlive.live
+ - ecanlitvizle.net
cdnbg - tv.bnt.bg Yes No Streams may be geo-restricted to Bulgaria.
- bgonair.bg
- kanal3.bg
@@ -71,7 +76,8 @@ dogus - startv.com.tr Yes No
- ntv.com.tr
- eurostartv.com.tr
dommune dommune.com Yes --
-douyutv douyutv.com Yes --
+douyutv - douyu.com Yes Yes
+ - v.douyu.com
dplay - dplay.se -- Yes Streams may be geo-restricted.
Only non-premium streams currently supported.
- dplay.no
@@ -87,7 +93,7 @@ foxtr fox.com.tr Yes No
funimationnow - funimation.com -- Yes
- funimationnow.uk
furstream furstre.am Yes No
-gaminglive gaminglive.tv Yes Yes
+garena garena.live Yes --
gomexp gomexp.com Yes No
goodgame goodgame.ru Yes No Only HLS streams are available.
gulli replay.gulli.fr Yes Yes Streams may be geo-restricted to France.
@@ -98,15 +104,12 @@ huya huya.com Yes No Temporarily only HLS stream
ine ine.com --- Yes
itvplayer itv.com/itvplayer Yes Yes Streams may be geo-restricted to Great Britain.
kanal7 kanal7.com Yes No
-letontv leton.tv Yes --
livecoding livecoding.tv Yes --
liveme liveme.com Yes --
-livestation livestation.com Yes --
livestream new.livestream.com Yes --
media_ccc_de - media.ccc.de Yes Yes Only mp4 and HLS are supported.
- streaming... [4]_
mediaklikk mediaklikk.hu Yes No Streams may be geo-restricted to Hungary.
-meerkat meerkatapp.co Yes --
mips mips.tv Yes -- Requires rtmpdump with K-S-V patches.
mitele mitele.es Yes No Streams may be geo-restricted to Spain.
mlgtv mlg.tv Yes --
@@ -126,9 +129,9 @@ pandatv panda.tv Yes ?
periscope periscope.tv Yes Yes Replay/VOD is supported.
picarto picarto.tv Yes --
playtv playtv.fr Yes -- Streams may be geo-restricted to France.
-pluzz pluzz.francetv.fr Yes Yes Streams may be geo-restricted to France, Andorra and Monaco.
- ludo.fr
- zouzous.fr
+pluzz - pluzz.francetv.fr Yes Yes Streams may be geo-restricted to France, Andorra and Monaco.
+ - ludo.fr
+ - zouzous.fr
powerapp powerapp.com.tr Yes No
raiplay raiplay.it Yes No Most streams are geo-restricted to Italy.
rtlxl rtlxl.nl No Yes Streams may be geo-restricted to The Netherlands. Livestreams not supported.
@@ -180,7 +183,7 @@ tv4play - tv4play.se Yes Yes Streams may be geo-restrict
tv8 tv8.com.tr Yes No
tv8cat tv8.cat Yes No Streams may be geo-restricted to Spain/Catalunya.
tv360 tv360.com.tr Yes No
-tvcatchup - tvcatchup.com Yes No Streams may be geo-restricted to Great Britain.
+tvcatchup tvcatchup.com Yes No Streams may be geo-restricted to Great Britain.
tvplayer tvplayer.com Yes No Streams may be geo-restricted to Great Britain. Premium streams are not supported.
tvrby tvr.by Yes No Streams may be geo-restricted to Belarus.
tvrplus tvrplus.ro Yes No Streams may be geo-restricted to Romania.
@@ -191,9 +194,7 @@ vaughnlive - vaughnlive.tv Yes --
- breakers.tv
- instagib.tv
- vapers.tv
-veetle veetle.com Yes Yes
vgtv vgtv.no Yes Yes
-viagame viagame.com
viasat - tv3play.se Yes Yes Streams may be geo-restricted.
- tv3play.no
- tv3play.dk
@@ -224,4 +225,4 @@ zhanqitv zhanqi.tv Yes No
.. [2] streamingvideoprovider.co.uk
.. [3] original.livestream.com
.. [4] streaming.media.ccc.de
-.. [5] mediathek.daserste.de
\ No newline at end of file
+.. [5] mediathek.daserste.de
diff --git a/setup.py b/setup.py
index 1b85eae7..5768a0d3 100644
--- a/setup.py
+++ b/setup.py
@@ -25,8 +25,12 @@ if version_info[0] == 2 or (version_info[0] == 3 and version_info[1] < 4):
deps.append("requests>=2.2,!=2.12.0,!=2.12.1,<3.0")
-# this version of pycryptodome is known to work and has a Windows wheel for py2.7, py3.3-3.6
-deps.append("pycryptodome>=3.4.3,<4")
+# for encrypted streams
+if environ.get("STREAMLINK_USE_PYCRYPTO"):
+ deps.append("pycrypto")
+else:
+ # this version of pycryptodome is known to work and has a Windows wheel for py2.7, py3.3-3.6
+ deps.append("pycryptodome>=3.4.3,<4")
# shutil.get_terminal_size and which were added in Python 3.3
if version_info[0] == 2:
diff --git a/src/streamlink/plugins/alieztv.py b/src/streamlink/plugins/aliez.py
similarity index 94%
rename from src/streamlink/plugins/alieztv.py
rename to src/streamlink/plugins/aliez.py
index 56069dc6..9313ef5c 100644
--- a/src/streamlink/plugins/alieztv.py
+++ b/src/streamlink/plugins/aliez.py
@@ -8,13 +8,8 @@ from streamlink.plugin.api import http, validate
from streamlink.stream import HTTPStream, RTMPStream
_url_re = re.compile(r"""
- http(s)?://(\w+\.)?aliez.tv
- (?:
- /live/[^/]+
- )?
- (?:
- /video/\d+/[^/]+
- )?
+ https?://(\w+\.)?aliez.\w+/
+ (?:live/[^/]+|video/\d+/[^/]+)
""", re.VERBOSE)
_file_re = re.compile(r"\"?file\"?:\s+['\"]([^'\"]+)['\"]")
_swf_url_re = re.compile(r"swfobject.embedSWF\(\"([^\"]+)\",")
diff --git a/src/streamlink/plugins/app17.py b/src/streamlink/plugins/app17.py
index b1e996ec..d0a17598 100644
--- a/src/streamlink/plugins/app17.py
+++ b/src/streamlink/plugins/app17.py
@@ -3,7 +3,7 @@ import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import http, validate, useragents
from streamlink.plugin.api.utils import parse_json
-from streamlink.stream import HLSStream, RTMPStream
+from streamlink.stream import HLSStream, RTMPStream, HTTPStream
API_URL = "https://api-dsa.17app.co/api/v1/liveStreams/isUserOnLiveStream"
ROOM_URL = "http://17app.co/share/live/{0}"
@@ -58,11 +58,14 @@ class App17(Plugin):
return
url = _rtmp_re.search(res.text).group(1)
- stream = RTMPStream(self.session, {
- "rtmp": url,
- "live": True
- })
- yield "live", stream
+ if 'rtmp:' in url:
+ stream = RTMPStream(self.session, {
+ "rtmp": url,
+ "live": True
+ })
+ yield "live", stream
+ else:
+ yield "live", HTTPStream(self.session, url)
prefix = url.replace("rtmp:", "http:").replace(".flv", ".m3u8")
if '.m3u8' not in prefix:
@@ -71,8 +74,7 @@ class App17(Plugin):
yield stream
else:
url = prefix
- stream = HLSStream(self.session, url)
- yield "live", stream
+ yield "live", HLSStream(self.session, url)
__plugin__ = App17
diff --git a/src/streamlink/plugins/bbciplayer.py b/src/streamlink/plugins/bbciplayer.py
index 37c0213b..d4c37ad9 100644
--- a/src/streamlink/plugins/bbciplayer.py
+++ b/src/streamlink/plugins/bbciplayer.py
@@ -9,6 +9,7 @@ from streamlink.plugin import Plugin
from streamlink.plugin.api import http
from streamlink.plugin.api import validate
from streamlink.stream import HDSStream
+from streamlink.stream import HLSStream
from streamlink.utils import parse_xml, parse_json
@@ -24,12 +25,19 @@ class BBCiPlayer(Plugin):
swf_url = "http://emp.bbci.co.uk/emp/SMPf/1.18.3/StandardMediaPlayerChromelessFlash.swf"
hash = base64.b64decode(b"N2RmZjc2NzFkMGM2OTdmZWRiMWQ5MDVkOWExMjE3MTk5MzhiOTJiZg==")
api_url = ("http://open.live.bbc.co.uk/mediaselector/5/select/"
- "version/2.0/mediaset/pc/vpid/{vpid}/atk/{vpid_hash}/asn/1/")
+ "version/2.0/mediaset/{platform}/vpid/{vpid}/atk/{vpid_hash}/asn/1/")
+ platforms = ("pc", "iptv-all")
+
mediaselector_schema = validate.Schema(
validate.transform(partial(parse_xml, ignore_ns=True)),
- validate.xml_findall(".//media[@kind='video']//connection[@transferFormat='hds']"),
- [validate.all(validate.getattr("attrib"), validate.get("href"))],
- validate.transform(lambda x: list(set(x))) # unique
+ validate.union({
+ "hds": validate.xml_findall(".//media[@kind='video']//connection[@transferFormat='hds']"),
+ "hls": validate.xml_findall(".//media[@kind='video']//connection[@transferFormat='hls']")
+ }),
+ {validate.text: validate.all(
+ [validate.all(validate.getattr("attrib"), validate.get("href"))],
+ validate.transform(lambda x: list(set(x))) # unique
+ )}
)
@classmethod
@@ -53,9 +61,15 @@ class BBCiPlayer(Plugin):
return m and m.group(1)
def mediaselector(self, vpid):
- url = self.api_url.format(vpid=vpid, vpid_hash=self._hash_vpid(vpid))
- stream_urls = http.get(url, schema=self.mediaselector_schema)
- return stream_urls
+ for platform in self.platforms:
+ url = self.api_url.format(vpid=vpid, vpid_hash=self._hash_vpid(vpid), platform=platform)
+ stream_urls = http.get(url, schema=self.mediaselector_schema)
+ for surl in stream_urls.get("hls"):
+ for s in HLSStream.parse_variant_playlist(self.session, surl).items():
+ yield s
+ for surl in stream_urls.get("hds"):
+ for s in HDSStream.parse_manifest(self.session, surl).items():
+ yield s
def _get_streams(self):
m = self.url_re.match(self.url)
@@ -67,10 +81,8 @@ class BBCiPlayer(Plugin):
vpid = self.find_vpid(self.url)
if vpid:
self.logger.debug("Found VPID: {0}", vpid)
- s = self.mediaselector(vpid)
- for url in s:
- for s in HDSStream.parse_manifest(self.session, url).items():
- yield s
+ for s in self.mediaselector(vpid):
+ yield s
else:
self.logger.error("Could not find VPID for episode {0}", episode_id)
elif channel_name:
@@ -78,9 +90,8 @@ class BBCiPlayer(Plugin):
tvip = self.find_tvip(self.url)
if tvip:
self.logger.debug("Found TVIP: {0}", tvip)
- s = self.mediaselector(tvip)
- for url in s:
- for s in HDSStream.parse_manifest(self.session, url).items():
- yield s
+ for s in self.mediaselector(tvip):
+ yield s
+
__plugin__ = BBCiPlayer
diff --git a/src/streamlink/plugins/bigo.py b/src/streamlink/plugins/bigo.py
index 98101c78..365d38cd 100644
--- a/src/streamlink/plugins/bigo.py
+++ b/src/streamlink/plugins/bigo.py
@@ -47,7 +47,7 @@ class BigoStream(Stream):
class Bigo(Plugin):
- _url_re = re.compile(r"https?://(live.bigo.tv/\d+|bigoweb.co/show/\d+)")
+ _url_re = re.compile(r"https?://(?:www\.)?(bigo\.tv/\d+|bigoweb\.co/show/\d+)")
_flashvars_re = flashvars = re.compile(
r'''^\s*(?<!<!--)<param.*value="tmp=(\d+)&channel=(\d+)&srv=(\d+\.\d+\.\d+\.\d+)&port=(\d+)"''',
re.M)
diff --git a/src/streamlink/plugins/bliptv.py b/src/streamlink/plugins/bliptv.py
deleted file mode 100644
index 6b252398..00000000
--- a/src/streamlink/plugins/bliptv.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import re
-
-from streamlink.plugin import Plugin, PluginError
-from streamlink.plugin.api import http
-from streamlink.stream import HTTPStream
-
-_url_re = re.compile(r"(http(s)?://)?blip.tv/.*-(?P<videoid>\d+)")
-VIDEO_GET_URL = 'http://player.blip.tv/file/get/{0}'
-SINGLE_VIDEO_URL = re.compile(r'.*\.((mp4)|(mov)|(m4v)|(flv))')
-
-QUALITY_WEIGHTS = {
- "ultra": 1080,
- "high": 720,
- "medium": 480,
- "low": 240,
-}
-
-QUALITY_WEIGHTS_ULTRA = re.compile(r'ultra+_(?P<level>\d+)')
-
-
-def get_quality_dict(quality_list):
- quality_list.sort()
- quality_dict = {}
- i = 0
- for i, bitrate in enumerate(quality_list):
- if i == 0:
- quality_dict['%i' % bitrate] = 'low'
- elif i == 1:
- quality_dict['%i' % bitrate] = 'medium'
- elif i == 2:
- quality_dict['%i' % bitrate] = 'high'
- elif i == 3:
- quality_dict['%i' % bitrate] = 'ultra'
- else:
- quality_dict['%i' % bitrate] = 'ultra+_%i' % (i - 3)
- return quality_dict
-
-
-class bliptv(Plugin):
- @classmethod
- def can_handle_url(cls, url):
- return _url_re.match(url)
-
- @classmethod
- def stream_weight(cls, key):
- match_ultra = QUALITY_WEIGHTS_ULTRA.match(key)
- if match_ultra:
- ultra_level = int(match_ultra.group('level'))
- return 1080 * (ultra_level + 1), "bliptv"
- weight = QUALITY_WEIGHTS.get(key)
- if weight:
- return weight, "bliptv"
- return Plugin.stream_weight(key)
-
- def _get_streams(self):
- match = _url_re.match(self.url)
- videoid = match.group('videoid')
- get_return = http.get(VIDEO_GET_URL.format(videoid))
- json_decode = http.json(get_return)
- streams = {}
- quality_list = []
- for stream in json_decode:
- if SINGLE_VIDEO_URL.match(stream['direct_url']):
- quality_list.append(int(stream['video_bitrate']))
- if len(quality_list) == 0:
- return
- quality_dict = get_quality_dict(quality_list)
- for stream in json_decode:
- if SINGLE_VIDEO_URL.match(stream['direct_url']):
- streams[quality_dict[stream['video_bitrate']]] = HTTPStream(self.session, stream['direct_url'])
- return streams
-
-
-__plugin__ = bliptv
diff --git a/src/streamlink/plugins/camsoda.py b/src/streamlink/plugins/camsoda.py
new file mode 100644
index 00000000..b91f3841
--- /dev/null
+++ b/src/streamlink/plugins/camsoda.py
@@ -0,0 +1,96 @@
+import random
+import re
+
+from streamlink.plugin import Plugin
+from streamlink.plugin.api import http
+from streamlink.plugin.api import validate
+from streamlink.stream import HLSStream
+
+_url_re = re.compile(r"http(s)?://(www\.)?camsoda\.com/(?P<username>[^\"\']+)")
+
+_api_user_schema = validate.Schema(
+ {
+ "status": validate.any(int, validate.text),
+ validate.optional("user"): {
+ "online": validate.any(int, validate.text),
+ "chatstatus": validate.text,
+ }
+ }
+)
+
+_api_video_schema = validate.Schema(
+ {
+ "token": validate.text,
+ "app": validate.text,
+ "edge_servers": [validate.text],
+ "private_servers": [validate.text],
+ "mjpeg_server": validate.text,
+ "stream_name": validate.text
+ }
+)
+
+
+class Camsoda(Plugin):
+ API_URL_USER = "https://www.camsoda.com/api/v1/user/{0}"
+ API_URL_VIDEO = "https://www.camsoda.com/api/v1/video/vtoken/{0}?username=guest_{1}"
+ HLS_URL_VIDEO = "https://{server}/{app}/mp4:{stream_name}_mjpeg/playlist.m3u8?token={token}"
+
+ @classmethod
+ def can_handle_url(cls, url):
+ return _url_re.match(url)
+
+ def _stream_status(self, data_user):
+ invalid_username = data_user["status"] is False
+ if invalid_username:
+ self.logger.info("No validate username found for {0}".format(self.url))
+ return
+
+ is_offline = data_user["user"]["online"] is False
+ if is_offline:
+ self.logger.info("This stream is currently offline")
+ return
+
+ return True
+
+ def _get_api_user(self, username):
+ res = http.get(self.API_URL_USER.format(username))
+ data_user = http.json(res, schema=_api_user_schema)
+ return data_user
+
+ def _get_api_video(self, username):
+ res = http.get(self.API_URL_VIDEO.format(username, str(random.randint(1000, 99999))))
+ data_video = http.json(res, schema=_api_video_schema)
+ return data_video
+
+ def _get_hls_url(self, data_user, data_video):
+ is_edge = data_user["user"]["chatstatus"] == "online"
+ is_priv = data_user["user"]["chatstatus"] == "private"
+ if is_edge:
+ server = data_video["edge_servers"][0]
+ elif is_priv:
+ server = data_video["private_servers"][0]
+ else:
+ server = data_video["mjpeg_server"]
+
+ hls_url = self.HLS_URL_VIDEO.format(server=server, app=data_video["app"], stream_name=data_video["stream_name"], token=data_video["token"])
+ return hls_url
+
+ def _get_streams(self):
+ match = _url_re.match(self.url)
+ username = match.group("username")
+ username = username.replace("/", "")
+
+ data_user = self._get_api_user(username)
+ stream_status = self._stream_status(data_user)
+
+ if stream_status:
+ data_video = self._get_api_video(username)
+ hls_url = self._get_hls_url(data_user, data_video)
+
+ try:
+ for s in HLSStream.parse_variant_playlist(self.session, hls_url).items():
+ yield s
+ except IOError as err:
+ self.logger.error("Error parsing stream: {0}", err)
+
+__plugin__ = Camsoda
diff --git a/src/streamlink/plugins/canlitv.py b/src/streamlink/plugins/canlitv.py
new file mode 100644
index 00000000..4ff9877b
--- /dev/null
+++ b/src/streamlink/plugins/canlitv.py
@@ -0,0 +1,61 @@
+import re
+
+from streamlink.plugin import Plugin
+from streamlink.plugin.api import http
+from streamlink.plugin.api import useragents
+from streamlink.stream import HLSStream
+
+EMBED_URL_1 = "http://www.canlitv.life/kanallar.php?kanal={0}"
+EMBED_URL_2 = "http://www.ecanlitvizle.net/embed.php?kanal={0}"
+
+_m3u8_re = re.compile(r"file:(?:\s+)?(?:\'|\")(?P<url>[^\"']+)(?:\'|\")")
+_url_re = re.compile(r"""http(s)?://(?:www\.)?(?P<domain>
+ canlitv\.(com|life)
+ |
+ canlitvlive\.(co|live)
+ |
+ ecanlitvizle\.net
+ )
+ /(izle/|(?:onizleme|tv)\.php\?kanal=)?
+ (?P<channel>[\w\-\=]+)""", re.VERBOSE)
+
+
+class Canlitv(Plugin):
+ @classmethod
+ def can_handle_url(cls, url):
+ return _url_re.match(url)
+
+ def _get_streams(self):
+ match = _url_re.match(self.url)
+ channel = match.group("channel")
+ domain = match.group("domain")
+
+ headers = {
+ "Referer": self.url,
+ "User-Agent": useragents.FIREFOX
+ }
+
+ if domain == "canlitv.life":
+ res = http.get(EMBED_URL_1.format(channel), headers=headers)
+ elif domain == "ecanlitvizle.net":
+ res = http.get(EMBED_URL_2.format(channel), headers=headers)
+ else:
+ res = http.get(self.url, headers=headers)
+
+ url_match = _m3u8_re.search(res.text)
+
+ if url_match:
+ hls_url = url_match.group("url")
+
+ self.logger.debug("Found URL: {0}".format(hls_url))
+
+ try:
+ s = []
+ for s in HLSStream.parse_variant_playlist(self.session, hls_url).items():
+ yield s
+ if not s:
+ yield "live", HLSStream(self.session, hls_url)
+ except IOError as err:
+ self.logger.error("Failed to extract streams: {0}", err)
+
+__plugin__ = Canlitv
diff --git a/src/streamlink/plugins/douyutv.py b/src/streamlink/plugins/douyutv.py
index b085089f..fdb413d4 100644
--- a/src/streamlink/plugins/douyutv.py
+++ b/src/streamlink/plugins/douyutv.py
@@ -6,25 +6,32 @@ from requests.adapters import HTTPAdapter
from streamlink.plugin import Plugin
from streamlink.plugin.api import http, validate, useragents
-from streamlink.stream import HTTPStream, HLSStream
+from streamlink.stream import HTTPStream, HLSStream, RTMPStream
#new API and key from https://gist.github.com/spacemeowx2/629b1d131bd7e240a7d28742048e80fc by spacemeowx2
MAPI_URL = "https://m.douyu.com/html5/live?roomId={0}"
-LAPI_URL = "http://coapi.douyucdn.cn/lapi/live/thirdPart/getPlay/{0}?rate={1}"
-
+LAPI_URL = "https://coapi.douyucdn.cn/lapi/live/thirdPart/getPlay/{0}?cdn={1}&rate={2}"
LAPI_SECRET = "9TUk5fjjUjg9qIMH3sdnh"
-
+VAPI_URL = "https://vmobile.douyu.com/video/getInfo?vid={0}"
SHOW_STATUS_ONLINE = 1
SHOW_STATUS_OFFLINE = 2
STREAM_WEIGHTS = {
"low": 540,
"medium": 720,
"source": 1080
- }
+}
_url_re = re.compile(r"""
- http(s)?://(www\.)?douyu.com
- /(?P<channel>[^/]+)
+ http(s)?://
+ (?:
+ (?P<subdomain>.+)
+ \.
+ )?
+ douyu.com/
+ (?:
+ show/(?P<vid>[^/&?]+)|
+ (?P<channel>[^/&?]+)
+ )
""", re.VERBOSE)
_room_id_re = re.compile(r'"room_id\\*"\s*:\s*(\d+),')
@@ -78,6 +85,15 @@ _lapi_schema = validate.Schema(
validate.get("data")
)
+_vapi_schema = validate.Schema(
+ {
+ "data": validate.any(None, {
+ "video_url": validate.text
+ })
+ },
+ validate.get("data")
+)
+
class Douyutv(Plugin):
@classmethod
@@ -90,30 +106,40 @@ class Douyutv(Plugin):
return STREAM_WEIGHTS[stream], "douyutv"
return Plugin.stream_weight(stream)
- #quality:
- # 0:source 2:medium 1:low
- def _get_room_json(self, channel, quality):
+ def _get_room_json(self, channel, rate):
+ cdn = "ws" #cdns: ["ws", "tct", "ws2", "dl"]
ts = int(time.time())
- sign = hashlib.md5("lapi/live/thirdPart/getPlay/{0}?aid=pcclient&rate={1}&time={2}{3}".format(channel, quality, ts, LAPI_SECRET).encode('ascii')).hexdigest()
- data = {
+ sign = hashlib.md5("lapi/live/thirdPart/getPlay/{0}?aid=pcclient&cdn={1}&rate={2}&time={3}{4}".format(channel, cdn, rate, ts, LAPI_SECRET).encode("ascii")).hexdigest()
+ headers = {
"auth": sign,
"time": str(ts),
- "aid": 'pcclient'
+ "aid": "pcclient"
}
-
- res = http.get(LAPI_URL.format(channel, quality), headers=data)
+ res = http.get(LAPI_URL.format(channel, cdn, rate), headers=headers)
room = http.json(res, schema=_lapi_schema)
return room
def _get_streams(self):
match = _url_re.match(self.url)
- channel = match.group("channel")
+ subdomain = match.group("subdomain")
- http.headers.update({'User-Agent': useragents.CHROME})
http.verify = False
http.mount('https://', HTTPAdapter(max_retries=99))
+ if subdomain == 'v':
+ vid = match.group("vid")
+ headers = {
+ "User-Agent": useragents.ANDROID,
+ "X-Requested-With": "XMLHttpRequest"
+ }
+ res = http.get(VAPI_URL.format(vid), headers=headers)
+ room = http.json(res, schema=_vapi_schema)
+ yield "source", HLSStream(self.session, room["video_url"])
+ return
+
#Thanks to @ximellon for providing method.
+ channel = match.group("channel")
+ http.headers.update({'User-Agent': useragents.CHROME})
try:
channel = int(channel)
except ValueError:
@@ -131,15 +157,20 @@ class Douyutv(Plugin):
self.logger.info("Stream currently unavailable.")
return
- room_source = self._get_room_json(channel, 0)
- yield "source", HTTPStream(self.session, room_source['live_url'])
- yield "source", HLSStream(self.session, room_source['hls_url'])
+ rate = [0, 2, 1]
+ quality = ['source', 'medium', 'low']
+ for i in range(0, 3, 1):
+ room = self._get_room_json(channel, rate[i])
+ url = room["live_url"]
+ if 'rtmp:' in url:
+ stream = RTMPStream(self.session, {
+ "rtmp": url,
+ "live": True
+ })
+ yield quality[i], stream
+ else:
+ yield quality[i], HTTPStream(self.session, url)
+ yield quality[i], HLSStream(self.session, room["hls_url"])
- room_medium = self._get_room_json(channel, 2)
- yield "medium", HTTPStream(self.session, room_medium['live_url'])
- yield "medium", HLSStream(self.session, room_medium['hls_url'])
- room_low = self._get_room_json(channel, 1)
- yield "low", HTTPStream(self.session, room_low['live_url'])
- yield "low", HLSStream(self.session, room_low['hls_url'])
__plugin__ = Douyutv
diff --git a/src/streamlink/plugins/gaminglive.py b/src/streamlink/plugins/gaminglive.py
deleted file mode 100644
index 59fbdf9e..00000000
--- a/src/streamlink/plugins/gaminglive.py
+++ /dev/null
@@ -1,111 +0,0 @@
-import re
-
-from streamlink.plugin import Plugin
-from streamlink.plugin.api import http, validate
-from streamlink.stream import RTMPStream
-
-SWF_URL = "http://www.gaminglive.tv/lib/flowplayer/flash/flowplayer.commercial-3.2.18.swf"
-API_URL = "http://api.gaminglive.tv/{0}/{1}"
-VOD_RTMP_URL = "rtmp://gamingfs.fplive.net/gaming/{0}/"
-QUALITY_WEIGHTS = {
- "source": 5,
- "live": 5,
- "1080": 4,
- "720": 3,
- "480": 2,
- "medium": 2,
- "360": 1,
- "low": 1
-}
-
-_url_re = re.compile(r"""
- http(s)?://(\w+\.)?gaminglive\.tv
- /(?P<type>channels|videos)/(?P<name>[^/]+)
-""", re.VERBOSE)
-_quality_re = re.compile(r"[^/]+-(?P<quality>[^/]+)")
-
-_channel_schema = validate.Schema(
- {
- validate.optional("state"): {
- "stream": {
- "qualities": [validate.text],
- "rootUrl": validate.url(scheme="rtmp")
- }
- }
- },
- validate.get("state")
-)
-
-_vod_schema = validate.Schema(
- {
- "name": validate.text,
- "channel_slug": validate.text,
- "title": validate.text,
- "created_at": validate.transform(int)
- },
-)
-
-
-class GamingLive(Plugin):
- @classmethod
- def can_handle_url(self, url):
- return _url_re.match(url)
-
- @classmethod
- def stream_weight(cls, key):
- weight = QUALITY_WEIGHTS.get(key)
- if weight:
- return weight, "gaminglive"
-
- return Plugin.stream_weight(key)
-
- def _get_quality(self, label):
- match = _quality_re.match(label)
- if match:
- return match.group("quality")
-
- return "live"
-
- def _create_rtmp_stream(self, rtmp, playpath, live):
- return RTMPStream(self.session, {
- "rtmp": rtmp,
- "playpath": playpath,
- "pageUrl": self.url,
- "swfVfy": SWF_URL,
- "live": live
- })
-
- def _get_live_streams(self, name):
- res = http.get(API_URL.format("channels", name))
- json = http.json(res, schema=_channel_schema)
- if not json:
- return
-
- streams = {}
- for quality in json["stream"]["qualities"]:
- streams[self._get_quality(quality)] = self._create_rtmp_stream(json["stream"]["rootUrl"], quality, True)
-
- return streams
-
- def _get_vod_streams(self, name):
- res = http.get(API_URL.format("videos", name))
- json = http.json(res, schema=_vod_schema)
- if not json:
- return
-
- streams = {}
- streams["source"] = self._create_rtmp_stream(VOD_RTMP_URL.format(json["channel_slug"]), json["name"], True)
-
- return streams
-
- def _get_streams(self):
- match = _url_re.match(self.url)
- type = match.group("type")
-
- if type == "channels":
- return self._get_live_streams(match.group("name"))
- elif type == "videos":
- return self._get_vod_streams(match.group("name"))
-
-
-__plugin__ = GamingLive
diff --git a/src/streamlink/plugins/garena.py b/src/streamlink/plugins/garena.py
new file mode 100644
index 00000000..8349ebc7
--- /dev/null
+++ b/src/streamlink/plugins/garena.py
@@ -0,0 +1,69 @@
+import re
+
+from streamlink.plugin import Plugin
+from streamlink.plugin.api import http
+from streamlink.plugin.api import validate
+from streamlink.stream import HLSStream
+
+_url_re = re.compile(r"https?\:\/\/garena\.live\/(?:(?P<channel_id>\d+)|(?P<alias>\w+))")
+
+
+class Garena(Plugin):
+ API_INFO = "https://garena.live/api/channel_info_get"
+ API_STREAM = "https://garena.live/api/channel_stream_get"
+
+ _info_schema = validate.Schema(
+ {
+ "reply": validate.any({
+ "channel_id": int,
+ }, None),
+ "result": validate.text
+ }
+ )
+ _stream_schema = validate.Schema(
+ {
+ "reply": validate.any({
+ "streams": [
+ {
+ "url": validate.text,
+ "resolution": int,
+ "bitrate": int,
+ "format": int
+ }
+ ]
+ }, None),
+ "result": validate.text
+ }
+ )
+
+ @classmethod
+ def can_handle_url(self, url):
+ return _url_re.match(url)
+
+ def _post_api(self, api, payload, schema):
+ res = http.post(api, json=payload)
+ data = http.json(res, schema=schema)
+
+ if data["result"] == "success":
+ post_data = data["reply"]
+ return post_data
+
+ def _get_streams(self):
+ match = _url_re.match(self.url)
+ if match.group("alias"):
+ payload = {"alias": match.group("alias")}
+ info_data = self._post_api(self.API_INFO, payload, self._info_schema)
+ channel_id = info_data["channel_id"]
+ elif match.group("channel_id"):
+ channel_id = int(match.group("channel_id"))
+
+ if channel_id:
+ payload = {"channel_id": channel_id}
+ stream_data = self._post_api(self.API_STREAM, payload, self._stream_schema)
+ for stream in stream_data["streams"]:
+ n = "{0}p".format(stream["resolution"])
+ if stream["format"] == 3:
+ s = HLSStream(self.session, stream["url"])
+ yield n, s
+
+__plugin__ = Garena
diff --git a/src/streamlink/plugins/letontv.py b/src/streamlink/plugins/letontv.py
deleted file mode 100644
index a2c15803..00000000
--- a/src/streamlink/plugins/letontv.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import re
-
-from streamlink.plugin import Plugin
-from streamlink.plugin.api import http, validate
-from streamlink.stream import RTMPStream
-
-PLAYER_URL = "http://leton.tv/player.php"
-SWF_URL = "http://files.leton.tv/jwplayer.flash.swf"
-
-_url_re = re.compile(r"""
- http?://(\w+.)?leton.tv
- (?:
- /player\.php\?.*streampage=
- )?
- (?:
- /broadcast/
- )?
- (?P<streampage>[^/?&]+)
-""", re.VERBOSE)
-_js_var_re = re.compile(r"var (?P<var>\w+)\s?=\s?'?(?P<value>[^;']+)'?;")
-_rtmp_re = re.compile(r"/(?P<app>[^/]+)/(?P<playpath>.+)")
-
-
-def _parse_server_ip(values):
- octets = [
- values["a"] / values["f"],
- values["b"] / values["f"],
- values["c"] / values["f"],
- values["d"] / values["f"],
- ]
-
- return ".".join(str(int(octet)) for octet in octets)
-
-
-_schema = validate.Schema(
- validate.transform(_js_var_re.findall),
- validate.transform(dict),
- {
- "a": validate.transform(int),
- "b": validate.transform(int),
- "c": validate.transform(int),
- "d": validate.transform(int),
- "f": validate.transform(int),
- "v_part": validate.text,
- },
- validate.union({
- "server_ip": validate.transform(_parse_server_ip),
- "path": validate.all(
- validate.get("v_part"),
- validate.transform(_rtmp_re.findall),
- validate.get(0)
- )
- })
-)
-
-
-class LetOnTV(Plugin):
- @classmethod
- def can_handle_url(self, url):
- return _url_re.match(url)
-
- def _get_streams(self):
- match = _url_re.match(self.url)
- info = http.get(PLAYER_URL, params=match.groupdict(), schema=_schema)
- if not info["path"]:
- return
-
- app, playpath = info["path"]
- stream = RTMPStream(self.session, {
- "rtmp": "rtmp://{0}/{1}".format(info["server_ip"], app),
- "playpath": playpath,
- "pageUrl": self.url,
- "swfUrl": SWF_URL,
- "live": True
- })
-
- return dict(live=stream)
-
-
-__plugin__ = LetOnTV
diff --git a/src/streamlink/plugins/livestation.py b/src/streamlink/plugins/livestation.py
deleted file mode 100644
index dfa7155f..00000000
--- a/src/streamlink/plugins/livestation.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import re
-
-from streamlink.plugin import Plugin, PluginError, PluginOptions
-from streamlink.plugin.api import http, validate
-from streamlink.stream import HLSStream
-
-LOGIN_PAGE_URL = "http://www.livestation.com/en/users/new"
-LOGIN_POST_URL = "http://www.livestation.com/en/sessions.json"
-
-_csrf_token_re = re.compile(r"<meta content=\"([^\"]+)\" name=\"csrf-token\"")
-_hls_playlist_re = re.compile(r"<meta content=\"([^\"]+.m3u8)\" property=\"og:video\" />")
-_url_re = re.compile(r"http(s)?://(\w+\.)?livestation.com")
-
-_csrf_token_schema = validate.Schema(
- validate.transform(_csrf_token_re.search),
- validate.any(None, validate.get(1))
-)
-_hls_playlist_schema = validate.Schema(
- validate.transform(_hls_playlist_re.search),
- validate.any(
- None,
- validate.all(
- validate.get(1),
- validate.url(scheme="http", path=validate.endswith(".m3u8"))
- )
- )
-)
-_login_schema = validate.Schema({
- "email": validate.text,
- validate.optional("errors"): validate.all(
- {
- "base": [validate.text]
- },
- validate.get("base"),
- )
-})
-
-
-class Livestation(Plugin):
- options = PluginOptions({
- "email": "",
- "password": ""
- })
-
- @classmethod
- def can_handle_url(self, url):
- return _url_re.match(url)
-
- def _authenticate(self, email, password):
- csrf_token = http.get(LOGIN_PAGE_URL, schema=_csrf_token_schema)
- if not csrf_token:
- raise PluginError("Unable to find CSRF token")
-
- data = {
- "authenticity_token": csrf_token,
- "channel_id": "",
- "commit": "Login",
- "plan_id": "",
- "session[email]": email,
- "session[password]": password,
- "utf8": "\xE2\x9C\x93", # Check Mark Character
- }
-
- res = http.post(LOGIN_POST_URL, data=data, acceptable_status=(200, 422))
- result = http.json(res, schema=_login_schema)
-
- errors = result.get("errors")
- if errors:
- errors = ", ".join(errors)
- raise PluginError("Unable to authenticate: {0}".format(errors))
-
- self.logger.info("Successfully logged in as {0}", result["email"])
-
- def _get_streams(self):
- login_email = self.options.get("email")
- login_password = self.options.get("password")
- if login_email and login_password:
- self._authenticate(login_email, login_password)
-
- hls_playlist = http.get(self.url, schema=_hls_playlist_schema)
- if not hls_playlist:
- return
-
- return HLSStream.parse_variant_playlist(self.session, hls_playlist)
-
-
-__plugin__ = Livestation
diff --git a/src/streamlink/plugins/meerkat.py b/src/streamlink/plugins/meerkat.py
deleted file mode 100644
index 8e48fe19..00000000
--- a/src/streamlink/plugins/meerkat.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import re
-
-from streamlink.plugin import Plugin
-from streamlink.stream import HLSStream
-
-
-_url_re = re.compile(r"http(s)?://meerkatapp.co/(?P<user>[\w\-\=]+)/(?P<token>[\w\-]+)")
-
-
-class Meerkat(Plugin):
- @classmethod
- def can_handle_url(cls, url):
- return _url_re.match(url)
-
- def _get_streams(self):
- match = _url_re.match(self.url)
- if not match:
- return
-
- streams = {}
- streams["live"] = HLSStream(self.session, "http://cdn.meerkatapp.co/broadcast/{0}/live.m3u8".format(match.group("token")))
-
- return streams
-
-
-__plugin__ = Meerkat
diff --git a/src/streamlink/plugins/pluzz.py b/src/streamlink/plugins/pluzz.py
index 63f74160..db49cdc1 100644
--- a/src/streamlink/plugins/pluzz.py
+++ b/src/streamlink/plugins/pluzz.py
@@ -7,6 +7,7 @@ from streamlink.plugin import Plugin, PluginOptions
from streamlink.plugin.api import http, validate
from streamlink.stream import HDSStream, HLSStream, HTTPStream
from streamlink.stream.ffmpegmux import MuxedStream
+from streamlink.utils import update_scheme
class Pluzz(Plugin):
@@ -18,7 +19,7 @@ class Pluzz(Plugin):
_pluzz_video_id_re = re.compile(r'id="current_video" href="http://.+?\.(?:francetv|francetelevisions)\.fr/(?:video/|\?id-video=)(?P<video_id>.+?)"')
_other_video_id_re = re.compile(r'playlist: \[{.*?,"identity":"(?P<video_id>.+?)@(?P<catalogue>Ludo|Zouzous)"')
_player_re = re.compile(r'src="(?P<player>//staticftv-a\.akamaihd\.net/player/jquery\.player.+?-[0-9a-f]+?\.js)"></script>')
- _swf_re = re.compile(r'getUrl\("(?P<swf>/bower_components/player_flash/dist/FranceTVNVPVFlashPlayer\.akamai.+?\.swf)"\)')
+ _swf_re = re.compile(r'//staticftv-a\.akamaihd\.net/player/bower_components/player_flash/dist/FranceTVNVPVFlashPlayer\.akamai-[0-9a-f]+\.swf')
_hds_pv_data_re = re.compile(r"~data=.+?!")
_mp4_bitrate_re = re.compile(r'.*-(?P<bitrate>[0-9]+k)\.mp4')
@@ -102,11 +103,11 @@ class Pluzz(Plugin):
match = self._player_re.search(res.text)
swf_url = None
if match is not None:
- player_url = 'http:' + match.group('player')
+ player_url = update_scheme(self.url, match.group('player'))
res = http.get(player_url)
match = self._swf_re.search(res.text)
if match is not None:
- swf_url = os.path.dirname(player_url) + match.group('swf')
+ swf_url = update_scheme(self.url, match.group(0))
res = http.get(self.API_URL.format(video_id, catalogue))
videos = http.json(res, schema=self._api_schema)
diff --git a/src/streamlink/plugins/veetle.py b/src/streamlink/plugins/veetle.py
deleted file mode 100644
index 3a1c19b5..00000000
--- a/src/streamlink/plugins/veetle.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import re
-
-from streamlink.compat import urlparse
-from streamlink.plugin import Plugin
-from streamlink.plugin.api import http, validate
-from streamlink.stream import FLVPlaylist, HTTPStream
-
-API_URL = "http://veetle.com/index.php/stream/ajaxStreamLocation/{0}/flash"
-
-_url_re = re.compile(r"""
- http(s)?://(\w+\.)?veetle.com
- (:?
- /.*(v|view)/
- (?P<channel>[^/]+/[^/&?]+)
- )?
-""", re.VERBOSE)
-
-_schema = validate.Schema({
- validate.optional("isLive"): bool,
- "payload": validate.any(int, validate.url(scheme="http")),
- "success": bool
-})
-
-
-class Veetle(Plugin):
- @classmethod
- def can_handle_url(self, url):
- return _url_re.match(url)
-
- def _get_streams(self):
- self.url = http.resolve_url(self.url)
- match = _url_re.match(self.url)
- parsed = urlparse(self.url)
- if parsed.fragment:
- channel_id = parsed.fragment
- elif parsed.path[:3] == '/v/':
- channel_id = parsed.path.split('/')[-1]
- else:
- channel_id = match.group("channel")
-
- if not channel_id:
- return
-
- channel_id = channel_id.lower().replace("/", "_")
- res = http.get(API_URL.format(channel_id))
- info = http.json(res, schema=_schema)
-
- if not info["success"]:
- return
-
- if info.get("isLive"):
- name = "live"
- else:
- name = "vod"
-
- stream = HTTPStream(self.session, info["payload"])
- # Wrap the stream in a FLVPlaylist to verify the FLV tags
- stream = FLVPlaylist(self.session, [stream])
-
- return {name: stream}
-
-
-__plugin__ = Veetle
diff --git a/src/streamlink/plugins/viagame.py b/src/streamlink/plugins/viagame.py
deleted file mode 100644
index 29bab733..00000000
--- a/src/streamlink/plugins/viagame.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""Plugin for Viasat's gaming site Viagame."""
-
-import re
-
-from streamlink.plugin.api import http, validate
-from streamlink.plugin.api.utils import parse_json
-from streamlink.plugin.api.support_plugin import viasat
-
-STREAM_API_URL = "http://playapi.mtgx.tv/v3/videos/stream/{0}"
-
-_embed_url_re = re.compile(
- '<meta itemprop="embedURL" content="http://www.viagame.com/embed/video/([^"]+)"'
-)
-_store_data_re = re.compile(r"window.fluxData\s*=\s*JSON.parse\(\"(.+)\"\);")
-_url_re = re.compile(r"http(s)?://(www\.)?viagame.com/channels/.+")
-
-_store_schema = validate.Schema(
- {
- "initialStoresData": [{
- "instanceName": validate.text,
- "storeName": validate.text,
- "initialData": validate.any(dict, list)
- }]
- },
- validate.get("initialStoresData")
-)
-_match_store_schema = validate.Schema(
- {
- "match": {
- "id": validate.text,
- "type": validate.text,
- "videos": [{
- "id": validate.text,
- "play_id": validate.text,
- }]
- }
- },
- validate.get("match")
-)
-
-
-class Viagame(viasat.Viasat):
- @classmethod
- def can_handle_url(cls, url):
- return _url_re.match(url)
-
- def _find_store(self, res, name):
- match = _store_data_re.search(res.text)
- if not match:
- return
-
- stores_data = parse_json(match.group(1).replace('\\"', '"'),
- schema=_store_schema)
- if not stores_data:
- return
-
- for store in filter(lambda s: s["instanceName"] == name, stores_data):
- return store
-
- def _find_video_id(self, res):
- match = _embed_url_re.search(res.text)
- if match:
- return match.group(1)
-
- def _find_stream_id(self):
- res = http.get(self.url)
- video_id = self._find_video_id(res)
- match_store = self._find_store(res, "matchStore")
- if not (video_id and match_store):
- return
-
- match_store = _match_store_schema.validate(match_store["initialData"])
- for video in filter(lambda v: v["id"] == video_id, match_store["videos"]):
- return video["play_id"]
-
- def _get_streams(self):
- stream_id = self._find_stream_id()
- if not stream_id:
- return
-
- return self._extract_streams(stream_id)
-
-
-__plugin__ = Viagame
diff --git a/src/streamlink/utils/l10n.py b/src/streamlink/utils/l10n.py
index 1791446b..461fbecc 100644
--- a/src/streamlink/utils/l10n.py
+++ b/src/streamlink/utils/l10n.py
@@ -12,7 +12,9 @@ except ImportError: # pragma: no cover
PYCOUNTRY = True
-DEFAULT_LANGUAGE_CODE = "en_US"
+DEFAULT_LANGUAGE = "en"
+DEFAULT_COUNTRY = "US"
+DEFAULT_LANGUAGE_CODE = "{0}_{1}".format(DEFAULT_LANGUAGE, DEFAULT_COUNTRY)
class Country(object):
@@ -116,8 +118,15 @@ class Localization(object):
def language_code(self):
return self._language_code
+ def _parse_locale_code(self, language_code):
+ parts = language_code.split("_", 1)
+ if len(parts) != 2 or len(parts[0]) != 2 or len(parts[1]) != 2:
+ raise LookupError("Invalid language code: {0}".format(language_code))
+ return self.get_language(parts[0]), self.get_country(parts[1])
+
@language_code.setter
def language_code(self, language_code):
+ is_system_locale = language_code is None
if language_code is None:
try:
language_code, _ = locale.getdefaultlocale()
@@ -125,16 +134,19 @@ class Localization(object):
language_code = None
if language_code is None or language_code == "C":
# cannot be determined
- language_code = DEFAULT_LANGUAGE_CODE
-
- parts = language_code.split("_", 1)
+ language_code = DEFAULT_LANGUAGE
- if len(parts) != 2 or len(parts[0]) != 2 or len(parts[1]) != 2:
- raise LookupError("Invalid language code: {0}".format(language_code))
-
- self._language_code = language_code
- self.language = self.get_language(parts[0])
- self.country = self.get_country(parts[1])
+ try:
+ self.language, self.country = self._parse_locale_code(language_code)
+ self._language_code = language_code
+ except LookupError:
+ if is_system_locale:
+ # If the system locale returns an invalid code, use the default
+ self.language = self.get_language(DEFAULT_LANGUAGE)
+ self.country = self.get_country(DEFAULT_COUNTRY)
+ self._language_code = DEFAULT_LANGUAGE_CODE
+ else:
+ raise
def equivalent(self, language=None, country=None):
equivalent = True
diff --git a/src/streamlink_cli/argparser.py b/src/streamlink_cli/argparser.py
index de4127a6..94018533 100644
--- a/src/streamlink_cli/argparser.py
+++ b/src/streamlink_cli/argparser.py
@@ -1063,21 +1063,6 @@ plugin.add_argument(
region restrictions
"""
)
-plugin.add_argument(
- "--livestation-email",
- metavar="EMAIL",
- help="""
- A Livestation account email to access restricted or premium
- quality streams.
- """
-)
-plugin.add_argument(
- "--livestation-password",
- metavar="PASSWORD",
- help="""
- A Livestation account password to use with --livestation-email.
- """
-)
plugin.add_argument(
"--btv-username",
metavar="USERNAME",
diff --git a/src/streamlink_cli/main.py b/src/streamlink_cli/main.py
index 73d24932..db743137 100644
--- a/src/streamlink_cli/main.py
+++ b/src/streamlink_cli/main.py
@@ -827,14 +827,6 @@ def setup_plugin_options():
streamlink.set_plugin_option("crunchyroll", "locale",
args.crunchyroll_locale)
- if args.livestation_email:
- streamlink.set_plugin_option("livestation", "email",
- args.livestation_email)
-
- if args.livestation_password:
- streamlink.set_plugin_option("livestation", "password",
- args.livestation_password)
-
if args.btv_username:
streamlink.set_plugin_option("btv", "username", args.btv_username)
| Plugin request: Aliez.tv to Aliez.me
### Checklist
- [x] This is a bug report.
- [x] This is a plugin request.
- [ ] This is a feature request.
- [ ] I used the search function to find already opened/closed issues or pull requests.
### Description
Aliez.tv does not exist anymore; there is Aliez.me, which is actually the same streaming site.
### Reproduction steps / Stream URLs to test
1. http://aliez.me/live/fkpev212/ is the link I'm trying to open in the player | streamlink/streamlink | diff --git a/tests/test_localization.py b/tests/test_localization.py
index 102a6472..4a17da9c 100644
--- a/tests/test_localization.py
+++ b/tests/test_localization.py
@@ -63,6 +63,13 @@ class LocalizationTestsMixin(object):
self.assertEqual("en_US", l.language_code)
self.assertTrue(l.equivalent(language="en", country="US"))
+ @patch("locale.getdefaultlocale")
+ def test_default_invalid(self, getdefaultlocale):
+ getdefaultlocale.return_value = ("en_150", None)
+ l = l10n.Localization()
+ self.assertEqual("en_US", l.language_code)
+ self.assertTrue(l.equivalent(language="en", country="US"))
+
def test_get_country(self):
self.assertEqual("US",
l10n.Localization.get_country("USA").alpha2)
diff --git a/tests/test_plugin_bigo.py b/tests/test_plugin_bigo.py
new file mode 100644
index 00000000..99bd4e17
--- /dev/null
+++ b/tests/test_plugin_bigo.py
@@ -0,0 +1,31 @@
+import unittest
+
+from streamlink.plugins.bigo import Bigo
+
+
+class TestPluginBongacams(unittest.TestCase):
+ def test_can_handle_url(self):
+ # Correct urls
+ self.assertTrue(Bigo.can_handle_url("http://www.bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://www.bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("http://bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("http://bigo.tv/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://bigo.tv/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://www.bigo.tv/00000000"))
+ self.assertTrue(Bigo.can_handle_url("http://www.bigo.tv/00000000"))
+
+ # Old URLs don't work anymore
+ self.assertFalse(Bigo.can_handle_url("http://live.bigo.tv/00000000"))
+ self.assertFalse(Bigo.can_handle_url("https://live.bigo.tv/00000000"))
+
+ # Wrong URL structure
+ self.assertFalse(Bigo.can_handle_url("ftp://www.bigo.tv/00000000"))
+ self.assertFalse(Bigo.can_handle_url("https://www.bigo.tv/show/00000000"))
+ self.assertFalse(Bigo.can_handle_url("http://www.bigo.tv/show/00000000"))
+ self.assertFalse(Bigo.can_handle_url("http://bigo.tv/show/00000000"))
+ self.assertFalse(Bigo.can_handle_url("https://bigo.tv/show/00000000"))
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_plugin_camsoda.py b/tests/test_plugin_camsoda.py
new file mode 100644
index 00000000..444ffbca
--- /dev/null
+++ b/tests/test_plugin_camsoda.py
@@ -0,0 +1,68 @@
+import unittest
+
+from streamlink import Streamlink
+
+try:
+ from unittest.mock import patch, Mock
+except ImportError:
+ from mock import patch, Mock
+
+from streamlink.plugins.camsoda import Camsoda
+from streamlink.stream import HLSStream
+
+
+class TestPluginCamsoda(unittest.TestCase):
+ def setUp(self):
+ self.session = Streamlink()
+ self.plugin = Camsoda("https://www.camsoda.com/stream-name")
+
+ def test_can_handle_url(self):
+ # should match
+ self.assertTrue(Camsoda.can_handle_url("https://www.camsoda.com/stream-name"))
+ self.assertTrue(Camsoda.can_handle_url("https://www.camsoda.com/streamname"))
+ self.assertTrue(Camsoda.can_handle_url("https://www.camsoda.com/username"))
+
+ # shouldn't match
+ self.assertFalse(Camsoda.can_handle_url("http://local.local/"))
+ self.assertFalse(Camsoda.can_handle_url("http://localhost.localhost/"))
+
+ def test_get_hls_url(self):
+ api_data_video = {
+ "token": "abcdefghijklmnopqrstuvwxyz123456",
+ "app": "cam",
+ "edge_servers": ["edge.server", "edge.server2"],
+ "private_servers": ["priv.server", "priv.server2"],
+ "mjpeg_server": "mjpeg.server",
+ "stream_name": "username_enc4"
+ }
+
+ values = [
+ {
+ "api_data_user": {"user": {"chatstatus": "online"}},
+ "server": "edge.server"
+ }, {
+ "api_data_user": {"user": {"chatstatus": "private"}},
+ "server": "priv.server"
+ }, {
+ "api_data_user": {"user": {"chatstatus": "foobar"}},
+ "server": "mjpeg.server"
+ }
+ ]
+ for data in values:
+ data_video = api_data_video
+ data_user = data["api_data_user"]
+ server = data["server"]
+
+ HLS_URL_VIDEO = self.plugin._get_hls_url(data_user, data_video)
+ HLS_URL_VIDEO_TEST = self.plugin.HLS_URL_VIDEO.format(server=server, app=data_video["app"], stream_name=data_video["stream_name"], token=data_video["token"])
+
+ self.assertEqual(HLS_URL_VIDEO, HLS_URL_VIDEO_TEST)
+
+ @patch('streamlink.plugins.camsoda.http')
+ @patch('streamlink.plugins.camsoda.HLSStream')
+ def test_get_streams(self, hlsstream, mock_http):
+ hlsstream.parse_variant_playlist.return_value = {"test": HLSStream(self.session, "http://test.se/stream1")}
+
+ streams = self.plugin.get_streams()
+
+ self.assertTrue("test" in streams)
diff --git a/tests/test_plugin_canlitv.py b/tests/test_plugin_canlitv.py
new file mode 100644
index 00000000..30c27155
--- /dev/null
+++ b/tests/test_plugin_canlitv.py
@@ -0,0 +1,18 @@
+import unittest
+
+from streamlink.plugins.canlitv import Canlitv
+
+
+class TestPluginCanlitv(unittest.TestCase):
+ def test_can_handle_url(self):
+ # should match
+ self.assertTrue(Canlitv.can_handle_url("http://www.canlitv.com/channel"))
+ self.assertTrue(Canlitv.can_handle_url("http://www.canlitv.life/channel"))
+ self.assertTrue(Canlitv.can_handle_url("http://www.canlitvlive.co/izle/channel.html"))
+ self.assertTrue(Canlitv.can_handle_url("http://www.canlitvlive.live/izle/channel.html"))
+ self.assertTrue(Canlitv.can_handle_url("http://www.ecanlitvizle.net/channel/"))
+ self.assertTrue(Canlitv.can_handle_url("http://www.ecanlitvizle.net/onizleme.php?kanal=channel"))
+ self.assertTrue(Canlitv.can_handle_url("http://www.ecanlitvizle.net/tv.php?kanal=channel"))
+ # shouldn't match
+ self.assertFalse(Canlitv.can_handle_url("http://www.canlitv.com"))
+ self.assertFalse(Canlitv.can_handle_url("http://www.ecanlitvizle.net"))
diff --git a/tests/test_plugin_garena.py b/tests/test_plugin_garena.py
new file mode 100644
index 00000000..05b82d2a
--- /dev/null
+++ b/tests/test_plugin_garena.py
@@ -0,0 +1,86 @@
+import json
+import unittest
+
+from streamlink import Streamlink
+
+try:
+ from unittest.mock import patch, Mock
+except ImportError:
+ from mock import patch, Mock
+
+from streamlink.plugins.garena import Garena
+
+
+class TestPluginGarena(unittest.TestCase):
+ def setUp(self):
+ self.session = Streamlink()
+
+ def test_can_handle_url(self):
+ # should match
+ self.assertTrue(Garena.can_handle_url("https://garena.live/LOLTW"))
+ self.assertTrue(Garena.can_handle_url("https://garena.live/358220"))
+
+ # shouldn't match
+ self.assertFalse(Garena.can_handle_url("http://local.local/"))
+ self.assertFalse(Garena.can_handle_url("http://localhost.localhost/"))
+
+ @patch('streamlink.plugins.garena.http')
+ def test_post_api_info(self, mock_http):
+ API_INFO = Garena.API_INFO
+ schema = Garena._info_schema
+
+ api_data = {
+ "reply": {
+ "channel_id": 358220,
+ },
+ "result": "success"
+ }
+
+ api_resp = Mock()
+ api_resp.text = json.dumps(api_data)
+ mock_http.post.return_value = api_resp
+ mock_http.json.return_value = api_data
+
+ payload = {"alias": "LOLTW"}
+
+ plugin = Garena("https://garena.live/LOLTW")
+
+ info_data = plugin._post_api(API_INFO, payload, schema)
+
+ self.assertEqual(info_data["channel_id"], 358220)
+
+ mock_http.post.assert_called_with(API_INFO, json=dict(alias="LOLTW"))
+
+ @patch('streamlink.plugins.garena.http')
+ def test_post_api_stream(self, mock_http):
+ API_STREAM = Garena.API_STREAM
+ schema = Garena._stream_schema
+
+ api_data = {
+ "reply": {
+ "streams": [
+ {
+ "url": "https://test.se/stream1",
+ "bitrate": 0,
+ "resolution": 1080,
+ "format": 3
+ },
+ ]
+ },
+ "result": "success"
+ }
+
+ api_resp = Mock()
+ api_resp.text = json.dumps(api_data)
+ mock_http.post.return_value = api_resp
+ mock_http.json.return_value = api_data
+
+ payload = {"channel_id": 358220}
+
+ plugin = Garena("https://garena.live/358220")
+
+ stream_data = plugin._post_api(API_STREAM, payload, schema)
+
+ self.assertEqual(stream_data["streams"], api_data["reply"]["streams"])
+
+ mock_http.post.assert_called_with(API_STREAM, json=dict(channel_id=358220))
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_added_files",
"has_removed_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 12
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"codecov",
"coverage",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
distlib==0.3.9
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
iso-639==0.4.5
iso3166==2.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycryptodome==3.21.0
pynsist==2.8
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests_download==0.1.2
-e git+https://github.com/streamlink/streamlink.git@192571b8ab4e4ebc1c7391257825e5347fcb2645#egg=streamlink
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
yarg==0.1.10
zipp==3.6.0
| name: streamlink
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- distlib==0.3.9
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso-639==0.4.5
- iso3166==2.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycryptodome==3.21.0
- pynsist==2.8
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-download==0.1.2
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- yarg==0.1.10
- zipp==3.6.0
prefix: /opt/conda/envs/streamlink
| [
"tests/test_localization.py::TestLocalization::test_bad_language_code",
"tests/test_localization.py::TestLocalization::test_country_compare",
"tests/test_localization.py::TestLocalization::test_default",
"tests/test_localization.py::TestLocalization::test_default_invalid",
"tests/test_localization.py::TestLocalization::test_equivalent_remap",
"tests/test_localization.py::TestLocalization::test_get_country_miss",
"tests/test_localization.py::TestLocalization::test_get_language",
"tests/test_localization.py::TestLocalization::test_get_language_miss",
"tests/test_localization.py::TestLocalization::test_language_code_kr",
"tests/test_localization.py::TestLocalization::test_language_code_us",
"tests/test_localization.py::TestLocalization::test_language_compare",
"tests/test_localization.py::TestLocalization::test_not_equivalent",
"tests/test_localization.py::TestLocalization::test_pycountry",
"tests/test_plugin_bigo.py::TestPluginBongacams::test_can_handle_url",
"tests/test_plugin_camsoda.py::TestPluginCamsoda::test_can_handle_url",
"tests/test_plugin_camsoda.py::TestPluginCamsoda::test_get_hls_url",
"tests/test_plugin_camsoda.py::TestPluginCamsoda::test_get_streams",
"tests/test_plugin_canlitv.py::TestPluginCanlitv::test_can_handle_url",
"tests/test_plugin_garena.py::TestPluginGarena::test_can_handle_url",
"tests/test_plugin_garena.py::TestPluginGarena::test_post_api_info",
"tests/test_plugin_garena.py::TestPluginGarena::test_post_api_stream"
]
| [
"tests/test_localization.py::TestLocalization::test_equivalent",
"tests/test_localization.py::TestLocalization::test_get_country"
]
| []
| []
| BSD 2-Clause "Simplified" License | 1,083 | [
"src/streamlink/plugins/meerkat.py",
"docs/install.rst",
"src/streamlink/plugins/camsoda.py",
"src/streamlink/utils/l10n.py",
"src/streamlink/plugins/bbciplayer.py",
"src/streamlink/plugins/alieztv.py",
"docs/plugin_matrix.rst",
"src/streamlink/plugins/bigo.py",
"src/streamlink/plugins/app17.py",
"src/streamlink_cli/argparser.py",
"src/streamlink/plugins/letontv.py",
"src/streamlink/plugins/douyutv.py",
"src/streamlink/plugins/livestation.py",
"src/streamlink/plugins/veetle.py",
"setup.py",
"src/streamlink/plugins/garena.py",
"src/streamlink/plugins/viagame.py",
"src/streamlink/plugins/pluzz.py",
"src/streamlink_cli/main.py",
"src/streamlink/plugins/bliptv.py",
"src/streamlink/plugins/canlitv.py",
"src/streamlink/plugins/gaminglive.py"
]
| [
"src/streamlink/plugins/meerkat.py",
"docs/install.rst",
"src/streamlink/plugins/camsoda.py",
"src/streamlink/utils/l10n.py",
"src/streamlink/plugins/bbciplayer.py",
"src/streamlink/plugins/bigo.py",
"docs/plugin_matrix.rst",
"src/streamlink/plugins/app17.py",
"src/streamlink_cli/argparser.py",
"src/streamlink/plugins/letontv.py",
"src/streamlink/plugins/douyutv.py",
"src/streamlink/plugins/livestation.py",
"src/streamlink/plugins/veetle.py",
"setup.py",
"src/streamlink/plugins/garena.py",
"src/streamlink/plugins/viagame.py",
"src/streamlink/plugins/aliez.py",
"src/streamlink/plugins/pluzz.py",
"src/streamlink_cli/main.py",
"src/streamlink/plugins/bliptv.py",
"src/streamlink/plugins/canlitv.py",
"src/streamlink/plugins/gaminglive.py"
]
|
|
scrapy__scrapy-2649 | cfb56400b29316a43208053a8ef6d48eb0eb499e | 2017-03-13 14:15:42 | dfe6d3d59aa3de7a96c1883d0f3f576ba5994aa9 | diff --git a/scrapy/logformatter.py b/scrapy/logformatter.py
index e7bf7942e..075a6d862 100644
--- a/scrapy/logformatter.py
+++ b/scrapy/logformatter.py
@@ -43,6 +43,8 @@ class LogFormatter(object):
'request_flags' : request_flags,
'referer': referer_str(request),
'response_flags': response_flags,
+ # backward compatibility with Scrapy logformatter below 1.4 version
+ 'flags': response_flags
}
}
| After adding request flags, subclasses of LogFormatter that rely on the 'flags' format string are broken
#2082 added flags to requests, but it also renamed the formatting string key from `flags` to `response_flags`/`request_flags`:
```
CRAWLEDMSG = u"Crawled (%(status)s) %(request)s (referer: %(referer)s)%(flags)s"
+CRAWLEDMSG = u"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s"
```
Scrapy allows you to override the logformatter, and this is what I have in my project. My logformatter looks roughly like this:
```python
# dirbot/logf.py
from scrapy.logformatter import LogFormatter
class CustomLogFormatter(LogFormatter):
def crawled(self, request, response, spider):
kwargs = super(CustomLogFormatter, self).crawled(
request, response, spider)
kwargs['msg'] = (
u"Crawled (%(status)s) %(request)s "
u"(referer: %(referer)s, latency: %(latency).2f s)%(flags)s"
)
kwargs['args']['latency'] = response.meta.get('download_latency', 0)
return kwargs
```
Now if you enable it in settings with `LOG_FORMATTER = 'dirbot.logf.CustomLogFormatter'` and try to run it with recent master, you'll get a KeyError:
```
2017-03-13 14:15:26 [scrapy.extensions.telnet] DEBUG: Telnet console listening on 127.0.0.1:6023
Traceback (most recent call last):
File "/usr/lib/python2.7/logging/__init__.py", line 851, in emit
msg = self.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 724, in format
return fmt.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 464, in format
record.message = record.getMessage()
File "/usr/lib/python2.7/logging/__init__.py", line 328, in getMessage
msg = msg % self.args
KeyError: u'flags'
Logged from file engine.py, line 238
Traceback (most recent call last):
File "/usr/lib/python2.7/logging/__init__.py", line 851, in emit
msg = self.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 724, in format
return fmt.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 464, in format
record.message = record.getMessage()
File "/usr/lib/python2.7/logging/__init__.py", line 328, in getMessage
msg = msg % self.args
KeyError: u'flags'
Logged from file engine.py, line 238
2017-03-13 14:15:27 [scrapy.core.scraper] DEBUG: Scraped from <200 http://www.dmoz.org/Computers/Programming/Languages/Python/Resources/>
```
So this change that renamed `flags` to `response_flags/request_flags` seems backward incompatible. | scrapy/scrapy | diff --git a/tests/test_logformatter.py b/tests/test_logformatter.py
index 11fe7b653..94e6c9fde 100644
--- a/tests/test_logformatter.py
+++ b/tests/test_logformatter.py
@@ -64,5 +64,30 @@ class LoggingContribTest(unittest.TestCase):
assert all(isinstance(x, six.text_type) for x in lines)
self.assertEqual(lines, [u"Scraped from <200 http://www.example.com>", u'name: \xa3'])
+
+class LogFormatterSubclass(LogFormatter):
+ def crawled(self, request, response, spider):
+ kwargs = super(LogFormatterSubclass, self).crawled(
+ request, response, spider)
+ CRAWLEDMSG = (
+ u"Crawled (%(status)s) %(request)s (referer: "
+ u"%(referer)s)%(flags)s"
+ )
+ return {
+ 'level': kwargs['level'],
+ 'msg': CRAWLEDMSG,
+ 'args': kwargs['args']
+ }
+
+
+class LogformatterSubclassTest(LoggingContribTest):
+ def setUp(self):
+ self.formatter = LogFormatterSubclass()
+ self.spider = Spider('default')
+
+ def test_flags_in_request(self):
+ pass
+
+
if __name__ == "__main__":
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==25.3.0
Automat==24.8.1
cffi==1.17.1
constantly==23.10.4
coverage==7.8.0
cryptography==44.0.2
cssselect==1.3.0
exceptiongroup==1.2.2
execnet==2.1.1
hyperlink==21.0.0
idna==3.10
incremental==24.7.2
iniconfig==2.1.0
jmespath==1.0.1
lxml==5.3.1
packaging==24.2
parsel==1.10.0
pluggy==1.5.0
pyasn1==0.6.1
pyasn1_modules==0.4.2
pycparser==2.22
PyDispatcher==2.0.7
pyOpenSSL==25.0.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
queuelib==1.7.0
-e git+https://github.com/scrapy/scrapy.git@cfb56400b29316a43208053a8ef6d48eb0eb499e#egg=Scrapy
service-identity==24.2.0
six==1.17.0
tomli==2.2.1
Twisted==24.11.0
typing_extensions==4.13.0
w3lib==2.3.1
zope.interface==7.2
| name: scrapy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==25.3.0
- automat==24.8.1
- cffi==1.17.1
- constantly==23.10.4
- coverage==7.8.0
- cryptography==44.0.2
- cssselect==1.3.0
- exceptiongroup==1.2.2
- execnet==2.1.1
- hyperlink==21.0.0
- idna==3.10
- incremental==24.7.2
- iniconfig==2.1.0
- jmespath==1.0.1
- lxml==5.3.1
- packaging==24.2
- parsel==1.10.0
- pluggy==1.5.0
- pyasn1==0.6.1
- pyasn1-modules==0.4.2
- pycparser==2.22
- pydispatcher==2.0.7
- pyopenssl==25.0.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- queuelib==1.7.0
- service-identity==24.2.0
- six==1.17.0
- tomli==2.2.1
- twisted==24.11.0
- typing-extensions==4.13.0
- w3lib==2.3.1
- zope-interface==7.2
prefix: /opt/conda/envs/scrapy
| [
"tests/test_logformatter.py::LogformatterSubclassTest::test_crawled"
]
| []
| [
"tests/test_logformatter.py::LoggingContribTest::test_crawled",
"tests/test_logformatter.py::LoggingContribTest::test_dropped",
"tests/test_logformatter.py::LoggingContribTest::test_flags_in_request",
"tests/test_logformatter.py::LoggingContribTest::test_scraped",
"tests/test_logformatter.py::LogformatterSubclassTest::test_dropped",
"tests/test_logformatter.py::LogformatterSubclassTest::test_flags_in_request",
"tests/test_logformatter.py::LogformatterSubclassTest::test_scraped"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,085 | [
"scrapy/logformatter.py"
]
| [
"scrapy/logformatter.py"
]
|
|
dpkp__kafka-python-1029 | 899f11730db5f209c03cfad20111ec131ee4c70b | 2017-03-13 17:32:28 | 618c5051493693c1305aa9f08e8a0583d5fcf0e3 | diff --git a/kafka/consumer/group.py b/kafka/consumer/group.py
index f2b1699..32f4556 100644
--- a/kafka/consumer/group.py
+++ b/kafka/consumer/group.py
@@ -819,7 +819,7 @@ class KafkaConsumer(six.Iterator):
Returns:
set: {topic, ...}
"""
- return self._subscription.subscription
+ return self._subscription.subscription.copy()
def unsubscribe(self):
"""Unsubscribe from all topics and clear all assigned partitions."""
| KafkaConsumer.subscribe() should return copy
When calling ``consumer.subscription()`` a reference to the internal ``set()`` object ``self._subscription.subscription`` is returned.
If that is modified (e.g. to add a new topic) and then passed to ``consumer.subscribe(topics)``, then ``SubscriptionState.change_subscription`` will hit ``subscription unchanged by change_subscription`` and therefore not call ``self._group_subscription.update``.
It should not be so easy to modify the internal state of the ``SubscriptionState``; therefore ``consumer.subscription()`` should return a copy of ``self._subscription.subscription``.
Example code, that does not work as expected:
```
consumer = KafkaConsumer()
topics = consumer.subscription()
topics.add('new-topic')
consumer.subscribe(topics) # Will log that subscription is unchanged
``` | dpkp/kafka-python | diff --git a/test/test_consumer.py b/test/test_consumer.py
index 073a3af..e5dd946 100644
--- a/test/test_consumer.py
+++ b/test/test_consumer.py
@@ -24,6 +24,14 @@ class TestKafkaConsumer(unittest.TestCase):
with self.assertRaises(KafkaConfigurationError):
KafkaConsumer(bootstrap_servers='localhost:9092', fetch_max_wait_ms=41000, request_timeout_ms=40000)
+ def test_subscription_copy(self):
+ consumer = KafkaConsumer('foo', api_version=(0, 10))
+ sub = consumer.subscription()
+ assert sub is not consumer.subscription()
+ assert sub == set(['foo'])
+ sub.add('fizz')
+ assert consumer.subscription() == set(['foo'])
+
class TestMultiProcessConsumer(unittest.TestCase):
@unittest.skipIf(sys.platform.startswith('win'), 'test mocking fails on windows')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-catchlog",
"pytest-sugar",
"pytest-mock",
"mock",
"python-snappy",
"lz4tools",
"xxhash"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libsnappy-dev"
],
"python": "3.6",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
cramjam==2.5.0
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
-e git+https://github.com/dpkp/kafka-python.git@899f11730db5f209c03cfad20111ec131ee4c70b#egg=kafka_python
lz4tools==1.3.1.2
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
pockets==0.9.1
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytest-catchlog==1.2.2
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-sugar==0.9.6
python-snappy==0.7.3
pytz==2025.2
requests==2.27.1
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-napoleon==0.7
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
termcolor==1.1.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
xxhash==3.2.0
zipp==3.6.0
| name: kafka-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- coverage==6.2
- cramjam==2.5.0
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- lz4tools==1.3.1.2
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- pockets==0.9.1
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-catchlog==1.2.2
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-sugar==0.9.6
- python-snappy==0.7.3
- pytz==2025.2
- requests==2.27.1
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-napoleon==0.7
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- termcolor==1.1.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- xxhash==3.2.0
- zipp==3.6.0
prefix: /opt/conda/envs/kafka-python
| [
"test/test_consumer.py::TestKafkaConsumer::test_subscription_copy"
]
| []
| [
"test/test_consumer.py::TestKafkaConsumer::test_fetch_max_wait_larger_than_request_timeout_raises",
"test/test_consumer.py::TestKafkaConsumer::test_non_integer_partitions",
"test/test_consumer.py::TestKafkaConsumer::test_session_timeout_larger_than_request_timeout_raises",
"test/test_consumer.py::TestMultiProcessConsumer::test_partition_list",
"test/test_consumer.py::TestSimpleConsumer::test_simple_consumer_commit_does_not_raise",
"test/test_consumer.py::TestSimpleConsumer::test_simple_consumer_failed_payloads",
"test/test_consumer.py::TestSimpleConsumer::test_simple_consumer_leader_change",
"test/test_consumer.py::TestSimpleConsumer::test_simple_consumer_reset_partition_offset",
"test/test_consumer.py::TestSimpleConsumer::test_simple_consumer_unknown_topic_partition"
]
| []
| Apache License 2.0 | 1,086 | [
"kafka/consumer/group.py"
]
| [
"kafka/consumer/group.py"
]
|
|
pypa__pip-4336 | 7adacc8b188bd6e9e39e3696528abbc264189682 | 2017-03-13 17:32:41 | 46e41dec1f30e2576a0a10abec18fb1a2150188d | diff --git a/.gitignore b/.gitignore
index 1eac77423..9ed67ea95 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,7 @@ MANIFEST
.tox
.cache
*.egg
+*.eggs
*.py[cod]
*~
.coverage
diff --git a/news/4336.feature b/news/4336.feature
new file mode 100644
index 000000000..f27be7d09
--- /dev/null
+++ b/news/4336.feature
@@ -0,0 +1,3 @@
+Change pip outdated to use PackageFinder in order to do the version lookup
+so that local mirrors in Environments that do not have Internet connections
+can be used as the Source of Truth for latest version.
diff --git a/pip/basecommand.py b/pip/basecommand.py
index 49c134071..bd2f788a1 100644
--- a/pip/basecommand.py
+++ b/pip/basecommand.py
@@ -248,7 +248,7 @@ class Command(object):
options,
retries=0,
timeout=min(5, options.timeout)) as session:
- pip_version_check(session)
+ pip_version_check(session, options)
return SUCCESS
diff --git a/pip/models/index.py b/pip/models/index.py
index be9911988..3d0042e3a 100644
--- a/pip/models/index.py
+++ b/pip/models/index.py
@@ -7,7 +7,6 @@ class Index(object):
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self.url_to_path('simple')
self.pypi_url = self.url_to_path('pypi')
- self.pip_json_url = self.url_to_path('pypi/pip/json')
def url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
diff --git a/pip/utils/outdated.py b/pip/utils/outdated.py
index 78ca2bbeb..bf6ff3fa7 100644
--- a/pip/utils/outdated.py
+++ b/pip/utils/outdated.py
@@ -10,7 +10,7 @@ from pip._vendor import lockfile
from pip._vendor.packaging import version as packaging_version
from pip.compat import WINDOWS
-from pip.models import PyPI
+from pip.index import PackageFinder
from pip.locations import USER_CACHE_DIR, running_under_virtualenv
from pip.utils import ensure_dir, get_installed_version
from pip.utils.filesystem import check_path_owner
@@ -92,7 +92,7 @@ def load_selfcheck_statefile():
return GlobalSelfCheckState()
-def pip_version_check(session):
+def pip_version_check(session, options):
"""Check for an update for pip.
Limit the frequency of checks to once per week. State is stored either in
@@ -100,7 +100,7 @@ def pip_version_check(session):
of the pip script path.
"""
installed_version = get_installed_version("pip")
- if installed_version is None:
+ if not installed_version:
return
pip_version = packaging_version.parse(installed_version)
@@ -121,18 +121,21 @@ def pip_version_check(session):
# Refresh the version if we need to or just see if we need to warn
if pypi_version is None:
- resp = session.get(
- PyPI.pip_json_url,
- headers={"Accept": "application/json"},
+ # Lets use PackageFinder to see what the latest pip version is
+ finder = PackageFinder(
+ find_links=options.find_links,
+ index_urls=[options.index_url] + options.extra_index_urls,
+ allow_all_prereleases=False, # Explicitly set to False
+ trusted_hosts=options.trusted_hosts,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ )
+ all_candidates = finder.find_all_candidates("pip")
+ if not all_candidates:
+ return
+ pypi_version = str(
+ max(all_candidates, key=lambda c: c.version).version
)
- resp.raise_for_status()
- pypi_version = [
- v for v in sorted(
- list(resp.json()["releases"]),
- key=packaging_version.parse,
- )
- if not packaging_version.parse(v).is_prerelease
- ][-1]
# save that we've performed a check
state.save(pypi_version, current_time)
@@ -154,7 +157,6 @@ def pip_version_check(session):
"'%s install --upgrade pip' command.",
pip_version, pypi_version, pip_cmd
)
-
except Exception:
logger.debug(
"There was an error checking the latest version of pip",
diff --git a/setup.py b/setup.py
index 71c6b1593..7029f71d9 100644
--- a/setup.py
+++ b/setup.py
@@ -42,8 +42,14 @@ def find_version(*file_paths):
long_description = read('README.rst')
-tests_require = ['pytest', 'virtualenv>=1.10', 'scripttest>=1.3', 'mock',
- 'pretend']
+tests_require = [
+ 'pytest',
+ 'mock',
+ 'pretend'
+ 'scripttest>=1.3',
+ 'virtualenv>=1.10',
+ 'freezegun',
+]
setup(
| Can't change the index url for pip version check
There should be a possibility to do that, either via the existing --index-url option or a new one.
| pypa/pip | diff --git a/tests/unit/test_unit_outdated.py b/tests/unit/test_unit_outdated.py
index d2d9c3f14..24313c8f3 100644
--- a/tests/unit/test_unit_outdated.py
+++ b/tests/unit/test_unit_outdated.py
@@ -8,43 +8,73 @@ import pytest
import pretend
from pip._vendor import lockfile
+from pip.index import InstallationCandidate
from pip.utils import outdated
+class MockPackageFinder(object):
+
+ BASE_URL = 'https://pypi.python.org/simple/pip-{0}.tar.gz'
+ PIP_PROJECT_NAME = 'pip'
+ INSTALLATION_CANDIDATES = [
+ InstallationCandidate(PIP_PROJECT_NAME, '6.9.0',
+ BASE_URL.format('6.9.0')),
+ InstallationCandidate(PIP_PROJECT_NAME, '3.3.1',
+ BASE_URL.format('3.3.1')),
+ InstallationCandidate(PIP_PROJECT_NAME, '1.0',
+ BASE_URL.format('1.0')),
+ ]
+
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def find_all_candidates(self, project_name):
+ return self.INSTALLATION_CANDIDATES
+
+
+def _options():
+ ''' Some default options that we pass to outdated.pip_version_check '''
+ return pretend.stub(
+ find_links=False, extra_index_urls=[], index_url='default_url',
+ pre=False, trusted_hosts=False, process_dependency_links=False,
+ )
+
+
@pytest.mark.parametrize(
- ['stored_time', 'newver', 'check', 'warn'],
[
- ('1970-01-01T10:00:00Z', '2.0', True, True),
- ('1970-01-01T10:00:00Z', '1.0', True, False),
- ('1970-01-06T10:00:00Z', '1.0', False, False),
- ('1970-01-06T10:00:00Z', '2.0', False, True),
+ 'stored_time',
+ 'installed_ver',
+ 'new_ver',
+ 'check_if_upgrade_required',
+ 'check_warn_logs',
+ ],
+ [
+ # Test we return None when installed version is None
+ ('1970-01-01T10:00:00Z', None, '1.0', False, False),
+ # Need an upgrade - upgrade warning should print
+ ('1970-01-01T10:00:00Z', '1.0', '6.9.0', True, True),
+ # No upgrade - upgrade warning should not print
+ ('1970-01-9T10:00:00Z', '6.9.0', '6.9.0', False, False),
]
)
-def test_pip_version_check(monkeypatch, stored_time, newver, check, warn):
- monkeypatch.setattr(outdated, 'get_installed_version', lambda name: '1.0')
-
- resp = pretend.stub(
- raise_for_status=pretend.call_recorder(lambda: None),
- json=pretend.call_recorder(lambda: {"releases": {newver: {}}}),
- )
- session = pretend.stub(
- get=pretend.call_recorder(lambda u, headers=None: resp),
- )
+def test_pip_version_check(monkeypatch, stored_time, installed_ver, new_ver,
+ check_if_upgrade_required, check_warn_logs):
+ monkeypatch.setattr(outdated, 'get_installed_version',
+ lambda name: installed_ver)
+ monkeypatch.setattr(outdated, 'PackageFinder', MockPackageFinder)
+ monkeypatch.setattr(outdated.logger, 'warning',
+ pretend.call_recorder(lambda *a, **kw: None))
+ monkeypatch.setattr(outdated.logger, 'debug',
+ pretend.call_recorder(lambda s, exc_info=None: None))
fake_state = pretend.stub(
- state={"last_check": stored_time, 'pypi_version': '1.0'},
+ state={"last_check": stored_time, 'pypi_version': installed_ver},
save=pretend.call_recorder(lambda v, t: None),
)
-
monkeypatch.setattr(
outdated, 'load_selfcheck_statefile', lambda: fake_state
)
- monkeypatch.setattr(outdated.logger, 'warning',
- pretend.call_recorder(lambda *a, **kw: None))
- monkeypatch.setattr(outdated.logger, 'debug',
- pretend.call_recorder(lambda s, exc_info=None: None))
-
with freezegun.freeze_time(
"1970-01-09 10:00:00",
ignore=[
@@ -52,26 +82,28 @@ def test_pip_version_check(monkeypatch, stored_time, newver, check, warn):
"pip._vendor.six.moves",
"pip._vendor.requests.packages.urllib3.packages.six.moves",
]):
- outdated.pip_version_check(session)
+ latest_pypi_version = outdated.pip_version_check(None, _options())
- assert not outdated.logger.debug.calls
-
- if check:
- assert session.get.calls == [pretend.call(
- "https://pypi.python.org/pypi/pip/json",
- headers={"Accept": "application/json"}
- )]
+ # See we return None if not installed_version
+ if not installed_ver:
+ assert not latest_pypi_version
+ # See that we saved the correct version
+ elif check_if_upgrade_required:
assert fake_state.save.calls == [
- pretend.call(newver, datetime.datetime(1970, 1, 9, 10, 00, 00)),
+ pretend.call(new_ver, datetime.datetime(1970, 1, 9, 10, 00, 00)),
]
- if warn:
- assert len(outdated.logger.warning.calls) == 1
- else:
- assert len(outdated.logger.warning.calls) == 0
else:
- assert session.get.calls == []
+ # Make sure no Exceptions
+ assert not outdated.logger.debug.calls
+ # See that save was not called
assert fake_state.save.calls == []
+ # Ensure we warn the user or not
+ if check_warn_logs:
+ assert len(outdated.logger.warning.calls) == 1
+ else:
+ assert len(outdated.logger.warning.calls) == 0
+
def test_virtualenv_state(monkeypatch):
CONTENT = '{"last_check": "1970-01-02T11:00:00Z", "pypi_version": "1.0"}'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 5
} | 9.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-rerunfailures",
"pytest-timeout",
"pytest-xdist",
"freezegun",
"mock",
"pretend",
"pyyaml",
"setuptools>=39.2.0",
"scripttest",
"virtualenv",
"wheel"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.0
execnet==2.1.1
filelock==3.18.0
freezegun==1.5.1
iniconfig==1.1.1
mock==5.2.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pretend==1.0.9
pytest==8.3.4
pytest-cov==6.0.0
pytest-rerunfailures==15.0
pytest-timeout==2.3.1
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
scripttest==2.0
six==1.17.0
tomli==2.0.1
virtualenv==20.29.3
| name: pip
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- distlib==0.3.9
- execnet==2.1.1
- filelock==3.18.0
- freezegun==1.5.1
- mock==5.2.0
- pip==10.0.0.dev0
- platformdirs==4.3.7
- pretend==1.0.9
- pytest-cov==6.0.0
- pytest-rerunfailures==15.0
- pytest-timeout==2.3.1
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- scripttest==2.0
- six==1.17.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/pip
| [
"tests/unit/test_unit_outdated.py::test_pip_version_check[1970-01-01T10:00:00Z-None-1.0-False-False]",
"tests/unit/test_unit_outdated.py::test_pip_version_check[1970-01-01T10:00:00Z-1.0-6.9.0-True-True]",
"tests/unit/test_unit_outdated.py::test_pip_version_check[1970-01-9T10:00:00Z-6.9.0-6.9.0-False-False]"
]
| []
| [
"tests/unit/test_unit_outdated.py::test_virtualenv_state",
"tests/unit/test_unit_outdated.py::test_global_state"
]
| []
| MIT License | 1,087 | [
"pip/basecommand.py",
"pip/models/index.py",
"news/4336.feature",
"setup.py",
".gitignore",
"pip/utils/outdated.py"
]
| [
"pip/basecommand.py",
"pip/models/index.py",
"news/4336.feature",
"setup.py",
".gitignore",
"pip/utils/outdated.py"
]
|
|
rm-hull__luma.core-50 | 844e29b1fa9b8310ade6e6a0d4a5cce664fc9cca | 2017-03-13 20:23:11 | 3a09a197b7167dacad43f644c55d027036459d53 | thijstriemstra: work in progress..
thijstriemstra: Enabling warnings displays a lot of resource warnings: https://travis-ci.org/rm-hull/luma.core/jobs/210702792#L472. I'll fix those as well.
thijstriemstra: @rm-hull the resource warnings coming from `test_spritesheet` could be considered bugs because the luma code doesn't close the image for you. But I'm not sure where to close the image in the `spritesheet` class..
```
WW0 in tests/test_spritesheet.py:42 the following warning was recorded:
.tox/py35/lib/python3.5/site-packages/PIL/Image.py:1028: ResourceWarning: unclosed file <_io.BufferedReader name='tests/reference/runner.png'>
self.load()
```
rm-hull: Think you're right about the spritesheet - maybe adding a `__del__()` destructor method to the spritesheet class that calls `self.image.close()` ?
thijstriemstra: @rm-hull spritesheet probably needs a `close` method. This can be called manually and if spritesheet would support context manager usage, `close` could be called there.
thijstriemstra: Should be ready for review.
rm-hull: Just reading the docs for https://pillow.readthedocs.io/en/4.0.x/reference/Image.html#PIL.Image.Image.load
This implies as soon as an image is accessed, `load` is called and the underlying file pointer is closed. Are we unnecessarily complicating things? Are the warnings actually just false alarms?
thijstriemstra: Pillow seems buggy with closing them. Pillow should also support the `with` keyword but it's not working. Also see http://stackoverflow.com/a/38542329
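A minimal sketch of the `close()` / context-manager idea suggested above (hypothetical code, not the actual luma.core implementation — the merged change below instead loads the image eagerly inside a `with open(...)` block):
```python
from PIL import Image


class closing_spritesheet(object):
    """Hypothetical wrapper that owns its file handle and releases it explicitly."""

    def __init__(self, path):
        self._fp = open(path, 'rb')         # keep the handle so it can be closed later
        self.image = Image.open(self._fp)   # Pillow reads lazily from this handle

    def close(self):
        # release the Pillow image first, then the underlying file object
        self.image.close()
        self._fp.close()

    # context-manager support, so callers can use the ``with`` statement
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
```
Callers could then write `with closing_spritesheet('tests/reference/runner.png') as sheet: ...` and the file handle would be released deterministically, which avoids the unclosed-file ResourceWarning.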
rm-hull: Can you squash & merge when everything goes green pls ? | diff --git a/luma/core/serial.py b/luma/core/serial.py
index f908e53..13d4c6f 100644
--- a/luma/core/serial.py
+++ b/luma/core/serial.py
@@ -12,6 +12,7 @@ import errno
import luma.core.error
from luma.core import lib
+from luma.core.util import deprecation
__all__ = ["i2c", "spi"]
@@ -135,17 +136,30 @@ class spi(object):
only support maxium of 64 or 128 bytes, whereas RPi/py-spidev supports
4096 (default).
:type transfer_size: int
- :param bcm_DC: The BCM pin to connect data/command select (DC) to (defaults to 24).
+ :param gpio_DC: The GPIO pin to connect data/command select (DC) to (defaults to 24).
+ :type gpio_DC: int
+ :param gpio_RST: The GPIO pin to connect reset (RES / RST) to (defaults to 24).
+ :type gpio_RST: int
+ :param bcm_DC: Deprecated. Use ``gpio_DC`` instead.
:type bcm_DC: int
- :param bcm_RST: The BCM pin to connect reset (RES / RST) to (defaults to 24).
+ :param bcm_RST: Deprecated. Use ``gpio_RST`` instead.
:type bcm_RST: int
:raises luma.core.error.DeviceNotFoundError: SPI device could not be found.
:raises luma.core.error.UnsupportedPlatform: GPIO access not available.
"""
def __init__(self, spi=None, gpio=None, port=0, device=0,
bus_speed_hz=8000000, transfer_size=4096,
- bcm_DC=24, bcm_RST=25):
+ gpio_DC=24, gpio_RST=25, bcm_DC=None, bcm_RST=None):
assert(bus_speed_hz in [mhz * 1000000 for mhz in [0.5, 1, 2, 4, 8, 16, 32]])
+
+ if bcm_DC is not None:
+ deprecation('bcm_DC argument is deprecated in favor of gpio_DC and will be removed in 1.0.0')
+ gpio_DC = bcm_DC
+
+ if bcm_RST is not None:
+ deprecation('bcm_RST argument is deprecated in favor of gpio_RST and will be removed in 1.0.0')
+ gpio_RST = bcm_RST
+
self._gpio = gpio or self.__rpi_gpio__()
self._spi = spi or self.__spidev__()
@@ -159,16 +173,16 @@ class spi(object):
self._transfer_size = transfer_size
self._spi.max_speed_hz = bus_speed_hz
- self._bcm_DC = bcm_DC
- self._bcm_RST = bcm_RST
+ self._gpio_DC = gpio_DC
+ self._gpio_RST = gpio_RST
self._cmd_mode = self._gpio.LOW # Command mode = Hold low
self._data_mode = self._gpio.HIGH # Data mode = Pull high
self._gpio.setmode(self._gpio.BCM)
- self._gpio.setup(self._bcm_DC, self._gpio.OUT)
- self._gpio.setup(self._bcm_RST, self._gpio.OUT)
- self._gpio.output(self._bcm_RST, self._gpio.LOW) # Reset device
- self._gpio.output(self._bcm_RST, self._gpio.HIGH) # Keep RESET pulled high
+ self._gpio.setup(self._gpio_DC, self._gpio.OUT)
+ self._gpio.setup(self._gpio_RST, self._gpio.OUT)
+ self._gpio.output(self._gpio_RST, self._gpio.LOW) # Reset device
+ self._gpio.output(self._gpio_RST, self._gpio.HIGH) # Keep RESET pulled high
def command(self, *cmd):
"""
@@ -177,7 +191,7 @@ class spi(object):
:param cmd: a spread of commands
:type cmd: int
"""
- self._gpio.output(self._bcm_DC, self._cmd_mode)
+ self._gpio.output(self._gpio_DC, self._cmd_mode)
self._spi.writebytes(list(cmd))
def data(self, data):
@@ -188,7 +202,7 @@ class spi(object):
:param data: a data sequence
:type data: list, bytearray
"""
- self._gpio.output(self._bcm_DC, self._data_mode)
+ self._gpio.output(self._gpio_DC, self._data_mode)
i = 0
n = len(data)
tx_sz = self._transfer_size
diff --git a/luma/core/sprite_system.py b/luma/core/sprite_system.py
index 49d1a01..5c68280 100755
--- a/luma/core/sprite_system.py
+++ b/luma/core/sprite_system.py
@@ -10,6 +10,7 @@ Simplified sprite animation framework.
"""
import time
+
from PIL import Image
@@ -66,7 +67,9 @@ class spritesheet(object):
Loosely based on http://www.createjs.com/docs/easeljs/classes/SpriteSheet.html
"""
def __init__(self, image, frames, animations):
- self.image = Image.open(image)
+ with open(image, 'rb') as fp:
+ self.image = Image.open(fp)
+ self.image.load()
self.frames = dict_wrapper(frames)
self.animations = dict_wrapper(animations)
# Reframe the sprite map in terms of the registration point (if set)
diff --git a/luma/core/util.py b/luma/core/util.py
new file mode 100644
index 0000000..b570518
--- /dev/null
+++ b/luma/core/util.py
@@ -0,0 +1,12 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Richard Hull and contributors
+# See LICENSE.rst for details.
+
+import warnings
+
+
+__all__ = ["deprecation"]
+
+
+def deprecation(message):
+ warnings.warn(message, DeprecationWarning, stacklevel=2)
diff --git a/setup.py b/setup.py
index 14fb26e..aee7ad3 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ version = read_file("VERSION.txt").strip()
needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
pytest_runner = ['pytest-runner'] if needs_pytest else []
-test_deps = ["mock", "pytest", "pytest-cov"]
+test_deps = ["mock", "pytest", "pytest-cov", "pytest-warnings"]
setup(
name="luma.core",
diff --git a/tox.ini b/tox.ini
index 96c74b0..e759594 100644
--- a/tox.ini
+++ b/tox.ini
@@ -8,7 +8,7 @@ skip_missing_interpreters = True
[testenv]
commands =
python setup.py install
- coverage run -m py.test -v
+ coverage run -m py.test -v -r w
coverage report
deps =
.[test]
| Deprecate SPI params bcm_DC, bcm_RST in favour of gpio_DC, gpio_RST
See https://github.com/rm-hull/luma.core/blob/master/luma/core/serial.py#L96
*bcm* is shorthand for 'broadcom' pin assignment, and is specific to RPi implementations, whereas *gpio* is a more general term, and should be used if ever a generic GPIO library replaces RPi.GPIO | rm-hull/luma.core | diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 0000000..cf41d46
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Richard Hull and contributors
+# See LICENSE.rst for details.
+
+"""
+Test helpers.
+"""
+
+
+import os.path
+
+
+def get_reference_image(fname):
+ return os.path.abspath(os.path.join(
+ os.path.dirname(__file__),
+ 'reference',
+ fname))
diff --git a/tests/test_dummy.py b/tests/test_dummy.py
index 1f90103..00dd2a8 100644
--- a/tests/test_dummy.py
+++ b/tests/test_dummy.py
@@ -7,13 +7,13 @@
Tests for the :py:class:`luma.core.device.dummy` class.
"""
-import os.path
from PIL import Image, ImageChops
from luma.core.render import canvas
from luma.core.device import dummy
import baseline_data
+from helpers import get_reference_image
def test_capture_noops():
@@ -28,36 +28,33 @@ def test_capture_noops():
def test_portrait():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'portrait.png')))
+ img_path = get_reference_image('portrait.png')
- device = dummy(rotate=1)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
- # Use the same drawing primitives as the demo
- with canvas(device) as draw:
- baseline_data.primitives(device, draw)
+ device = dummy(rotate=1)
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ # Use the same drawing primitives as the demo
+ with canvas(device) as draw:
+ baseline_data.primitives(device, draw)
+
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_dither():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'dither.png')))
-
- device = dummy(mode="1")
-
- with canvas(device, dither=True) as draw:
- draw.rectangle((0, 0, 64, 32), fill="red")
- draw.rectangle((64, 0, 128, 32), fill="yellow")
- draw.rectangle((0, 32, 64, 64), fill="blue")
- draw.rectangle((64, 32, 128, 64), fill="white")
-
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ img_path = get_reference_image('dither.png')
+
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy(mode="1")
+
+ with canvas(device, dither=True) as draw:
+ draw.rectangle((0, 0, 64, 32), fill="red")
+ draw.rectangle((64, 0, 128, 32), fill="yellow")
+ draw.rectangle((0, 32, 64, 64), fill="blue")
+ draw.rectangle((64, 32, 128, 64), fill="white")
+
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
diff --git a/tests/test_serial.py b/tests/test_serial.py
index 9a76f6e..0d78472 100644
--- a/tests/test_serial.py
+++ b/tests/test_serial.py
@@ -138,14 +138,42 @@ def verify_spi_init(port, device, bus_speed_hz=8000000, dc=24, rst=25):
def test_spi_init():
- spi(gpio=gpio, spi=spidev, port=5, device=2, bus_speed_hz=16000000, bcm_DC=17, bcm_RST=11)
- verify_spi_init(5, 2, 16000000, 17, 11)
+ port = 5
+ device = 2
+ bus_speed = 16000000
+ dc = 17
+ rst = 11
+
+ spi(gpio=gpio, spi=spidev, port=port, device=device, bus_speed_hz=16000000,
+ gpio_DC=dc, gpio_RST=rst)
+ verify_spi_init(port, device, bus_speed, dc, rst)
gpio.output.assert_has_calls([
- call(11, gpio.LOW),
- call(11, gpio.HIGH)
+ call(rst, gpio.LOW),
+ call(rst, gpio.HIGH)
])
+def test_spi_init_params_deprecated():
+ port = 5
+ device = 2
+ bus_speed = 16000000
+ dc = 80
+ rst = 90
+ msg1 = 'bcm_DC argument is deprecated in favor of gpio_DC and will be removed in 1.0.0'
+ msg2 = 'bcm_RST argument is deprecated in favor of gpio_RST and will be removed in 1.0.0'
+
+ with pytest.deprecated_call() as c:
+ spi(gpio=gpio, spi=spidev, port=port, device=device,
+ bus_speed_hz=bus_speed, bcm_DC=dc, bcm_RST=rst)
+ verify_spi_init(port, device, bus_speed, dc, rst)
+ gpio.output.assert_has_calls([
+ call(rst, gpio.LOW),
+ call(rst, gpio.HIGH)
+ ])
+ assert str(c.list[0].message) == msg1
+ assert str(c.list[1].message) == msg2
+
+
def test_spi_init_invalid_bus_speed():
with pytest.raises(AssertionError):
spi(gpio=gpio, spi=spidev, port=5, device=2, bus_speed_hz=942312)
diff --git a/tests/test_spritesheet.py b/tests/test_spritesheet.py
index 56a5636..fce267a 100644
--- a/tests/test_spritesheet.py
+++ b/tests/test_spritesheet.py
@@ -12,6 +12,7 @@ import PIL
from luma.core.sprite_system import spritesheet
+
data = {
'image': "tests/reference/runner.png",
'frames': {
diff --git a/tests/test_terminal.py b/tests/test_terminal.py
index 6a7f06c..fbfd8cf 100644
--- a/tests/test_terminal.py
+++ b/tests/test_terminal.py
@@ -7,112 +7,101 @@
Tests for the :py:class:`luma.core.virtual.terminal` class.
"""
-import os.path
from PIL import Image, ImageChops
from luma.core.device import dummy
from luma.core.virtual import terminal
+from helpers import get_reference_image
+
def test_wrapped_text():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'quick_brown_fox.png')))
+ img_path = get_reference_image('quick_brown_fox.png')
- device = dummy()
- term = terminal(device)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ term = terminal(device)
- term.println("The quick brown fox jumps over the lazy dog")
+ term.println("The quick brown fox jumps over the lazy dog")
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_tab_alignment():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'tab_align.png')))
+ img_path = get_reference_image('tab_align.png')
- device = dummy()
- term = terminal(device, animate=False)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ term = terminal(device, animate=False)
- term.println("1\t32\t999")
- term.println("999\t1\t32")
+ term.println("1\t32\t999")
+ term.println("999\t1\t32")
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_control_chars():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'control_chars.png')))
+ img_path = get_reference_image('control_chars.png')
- device = dummy()
- term = terminal(device, animate=False)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ term = terminal(device, animate=False)
- term.println('foo\rbar\bspam\teggs\n\nham and cheese on rye')
+ term.println('foo\rbar\bspam\teggs\n\nham and cheese on rye')
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_scrolling():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'scroll_text.png')))
+ img_path = get_reference_image('scroll_text.png')
- device = dummy()
- term = terminal(device, animate=False)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ term = terminal(device, animate=False)
- term.println(
- "it oozed over the blackness, and heard Harris's sleepy voice asking " +
- "where we drew near it, so they spread their handkerchiefs on the back " +
- "of Harris and Harris's friend as to avoid running down which, we managed " +
- "to get out of here while this billing and cooing is on. We'll go down " +
- "to eat vegetables. He said they were demons.")
+ term.println(
+ "it oozed over the blackness, and heard Harris's sleepy voice asking " +
+ "where we drew near it, so they spread their handkerchiefs on the back " +
+ "of Harris and Harris's friend as to avoid running down which, we managed " +
+ "to get out of here while this billing and cooing is on. We'll go down " +
+ "to eat vegetables. He said they were demons.")
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_alt_colors():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'alt_colors.png')))
+ img_path = get_reference_image('alt_colors.png')
- device = dummy()
- term = terminal(device, color="blue", bgcolor="grey", animate=False)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ term = terminal(device, color="blue", bgcolor="grey", animate=False)
- term.println("blue on grey")
+ term.println("blue on grey")
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_ansi_colors():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'ansi_colors.png')))
+ img_path = get_reference_image('ansi_colors.png')
- device = dummy()
- term = terminal(device)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ term = terminal(device)
- term.println("hello \033[31mworld\33[0m")
- term.println("this is \033[7mreversed\033[7m!")
- term.println("\033[45;37mYellow\033[0m \033[43;30mMagenta")
+ term.println("hello \033[31mworld\33[0m")
+ term.println("this is \033[7mreversed\033[7m!")
+ term.println("\033[45;37mYellow\033[0m \033[43;30mMagenta")
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
diff --git a/tests/test_viewport.py b/tests/test_viewport.py
index 88f6e6d..be7060e 100644
--- a/tests/test_viewport.py
+++ b/tests/test_viewport.py
@@ -9,7 +9,7 @@ helpers.
"""
import time
-import os.path
+
from PIL import Image, ImageChops
from luma.core.device import dummy
@@ -17,6 +17,7 @@ from luma.core.render import canvas
from luma.core.virtual import range_overlap, hotspot, snapshot, viewport
import baseline_data
+from helpers import get_reference_image
def overlap(box1, box2):
@@ -107,42 +108,38 @@ def test_snapshot_last_updated():
def test_viewport_set_position():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'set_position.png')))
+ img_path = get_reference_image('set_position.png')
- device = dummy()
- virtual = viewport(device, 200, 200)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ virtual = viewport(device, 200, 200)
- # Use the same drawing primitives as the demo
- with canvas(virtual) as draw:
- baseline_data.primitives(virtual, draw)
+ # Use the same drawing primitives as the demo
+ with canvas(virtual) as draw:
+ baseline_data.primitives(virtual, draw)
- virtual.set_position((20, 30))
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ virtual.set_position((20, 30))
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
def test_viewport_hotspot():
- reference = Image.open(
- os.path.abspath(os.path.join(
- os.path.dirname(__file__),
- 'reference',
- 'hotspot.png')))
+ img_path = get_reference_image('hotspot.png')
- device = dummy()
- virtual = viewport(device, 200, 200)
+ with open(img_path, 'rb') as p:
+ reference = Image.open(p)
+ device = dummy()
+ virtual = viewport(device, 200, 200)
- def draw_fn(draw, width, height):
- baseline_data.primitives(device, draw)
+ def draw_fn(draw, width, height):
+ baseline_data.primitives(device, draw)
- widget = hotspot(device.width, device.height, draw_fn)
+ widget = hotspot(device.width, device.height, draw_fn)
- virtual.add_hotspot(widget, (19, 56))
- virtual.set_position((28, 30))
- virtual.remove_hotspot(widget, (19, 56))
+ virtual.add_hotspot(widget, (19, 56))
+ virtual.set_position((28, 30))
+ virtual.remove_hotspot(widget, (19, 56))
- bbox = ImageChops.difference(reference, device.image).getbbox()
- assert bbox is None
+ bbox = ImageChops.difference(reference, device.image).getbbox()
+ assert bbox is None
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 4
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[test]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"mock",
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y i2c-tools"
],
"python": "3.7",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///croot/attrs_1668696182826/work
certifi @ file:///croot/certifi_1671487769961/work/certifi
coverage==7.2.7
flit_core @ file:///opt/conda/conda-bld/flit-core_1644941570762/work/source/flit_core
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1648562407465/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/rm-hull/luma.core.git@844e29b1fa9b8310ade6e6a0d4a5cce664fc9cca#egg=luma.core
mock==5.2.0
packaging @ file:///croot/packaging_1671697413597/work
Pillow==9.5.0
pluggy @ file:///tmp/build/80754af9/pluggy_1648042572264/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pytest==7.1.2
pytest-cov==4.1.0
RPi.GPIO==0.7.1
smbus2==0.5.0
spidev==3.6
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
typing_extensions @ file:///croot/typing_extensions_1669924550328/work
zipp @ file:///croot/zipp_1672387121353/work
| name: luma.core
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=22.1.0=py37h06a4308_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- flit-core=3.6.0=pyhd3eb1b0_0
- importlib-metadata=4.11.3=py37h06a4308_0
- importlib_metadata=4.11.3=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=22.0=py37h06a4308_0
- pip=22.3.1=py37h06a4308_0
- pluggy=1.0.0=py37h06a4308_1
- py=1.11.0=pyhd3eb1b0_0
- pytest=7.1.2=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py37h06a4308_0
- typing_extensions=4.4.0=py37h06a4308_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zipp=3.11.0=py37h06a4308_0
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.2.7
- mock==5.2.0
- pillow==9.5.0
- pytest-cov==4.1.0
- rpi-gpio==0.7.1
- smbus2==0.5.0
- spidev==3.6
prefix: /opt/conda/envs/luma.core
| [
"tests/test_serial.py::test_spi_init",
"tests/test_serial.py::test_spi_init_params_deprecated"
]
| [
"tests/test_dummy.py::test_portrait",
"tests/test_serial.py::test_i2c_init_device_permission_error",
"tests/test_viewport.py::test_viewport_set_position",
"tests/test_viewport.py::test_viewport_hotspot"
]
| [
"tests/test_dummy.py::test_capture_noops",
"tests/test_dummy.py::test_dither",
"tests/test_serial.py::test_i2c_init_device_not_found",
"tests/test_serial.py::test_i2c_init_device_address_error",
"tests/test_serial.py::test_i2c_init_no_bus",
"tests/test_serial.py::test_i2c_init_bus_provided",
"tests/test_serial.py::test_i2c_command",
"tests/test_serial.py::test_i2c_command_device_not_found_error",
"tests/test_serial.py::test_i2c_data",
"tests/test_serial.py::test_i2c_data_chunked",
"tests/test_serial.py::test_i2c_cleanup",
"tests/test_serial.py::test_spi_init_invalid_bus_speed",
"tests/test_serial.py::test_spi_command",
"tests/test_serial.py::test_spi_data",
"tests/test_serial.py::test_spi_cleanup",
"tests/test_serial.py::test_spi_init_device_not_found",
"tests/test_serial.py::test_spi_unsupported_gpio_platform",
"tests/test_spritesheet.py::test_init",
"tests/test_spritesheet.py::test_len",
"tests/test_spritesheet.py::test_caching",
"tests/test_spritesheet.py::test_get",
"tests/test_spritesheet.py::test_get_string",
"tests/test_spritesheet.py::test_get_outofrange",
"tests/test_spritesheet.py::test_animate_unknown_seq",
"tests/test_spritesheet.py::test_animate_finite_seq",
"tests/test_spritesheet.py::test_animate_slow_seq",
"tests/test_spritesheet.py::test_animate_infinite_seq",
"tests/test_spritesheet.py::test_animate_subroutine",
"tests/test_terminal.py::test_wrapped_text",
"tests/test_terminal.py::test_tab_alignment",
"tests/test_terminal.py::test_control_chars",
"tests/test_terminal.py::test_scrolling",
"tests/test_terminal.py::test_alt_colors",
"tests/test_terminal.py::test_ansi_colors",
"tests/test_viewport.py::test_range_overlap_over12",
"tests/test_viewport.py::test_range_overlap_over123",
"tests/test_viewport.py::test_range_overlap_over23",
"tests/test_viewport.py::test_range_overlap_over234",
"tests/test_viewport.py::test_range_overlap_over34",
"tests/test_viewport.py::test_range_overlap_over4",
"tests/test_viewport.py::test_range_overlap_over_none",
"tests/test_viewport.py::test_snapshot_last_updated"
]
| []
| MIT License | 1,088 | [
"luma/core/serial.py",
"setup.py",
"tox.ini",
"luma/core/util.py",
"luma/core/sprite_system.py"
]
| [
"luma/core/serial.py",
"setup.py",
"tox.ini",
"luma/core/util.py",
"luma/core/sprite_system.py"
]
|
asottile__pyupgrade-9 | 47f2b57f4b8ea963617199885ff46e0af7e63e45 | 2017-03-13 22:27:20 | c5f1eb3fd850fc793cd0208cc449f3be5718f052 | diff --git a/pyupgrade.py b/pyupgrade.py
index d2a6990..eafc8a3 100644
--- a/pyupgrade.py
+++ b/pyupgrade.py
@@ -46,6 +46,7 @@ def unparse_parsed_string(parsed):
UNIMPORTANT_WS = 'UNIMPORTANT_WS'
+NON_CODING_TOKENS = frozenset(('COMMENT', 'NL', UNIMPORTANT_WS))
Token = collections.namedtuple(
'Token', ('name', 'src', 'line', 'utf8_byte_offset'),
)
@@ -145,7 +146,7 @@ def _fix_format_literals(contents_text):
to_replace.append((string_start, string_end))
string_start, string_end, seen_dot = None, None, False
# NL is the non-breaking newline token
- elif token.name not in ('COMMENT', 'NL', UNIMPORTANT_WS):
+ elif token.name not in NON_CODING_TOKENS:
string_start, string_end, seen_dot = None, None, False
for start, end in reversed(to_replace):
@@ -291,6 +292,13 @@ def _get_victims(tokens, start, arg):
i += 1
+ # May need to remove a trailing comma
+ i -= 2
+ while tokens[i].name in NON_CODING_TOKENS:
+ i -= 1
+ if tokens[i].src == ',':
+ ends = sorted(set(ends + [i]))
+
return Victims(starts, ends, first_comma_index, arg_index)
| SyntaxError rewriting dict(thing,) or set(thing,) (trailing comma)
For example
```python
>>> set(x for x in range(1),)
{0}
>>> {x for x in range(1),}
File "<stdin>", line 1
{x for x in range(1),}
^
SyntaxError: invalid syntax
``` | asottile/pyupgrade | diff --git a/tests/pyupgrade_test.py b/tests/pyupgrade_test.py
index b2ff30f..7b48712 100644
--- a/tests/pyupgrade_test.py
+++ b/tests/pyupgrade_test.py
@@ -130,6 +130,17 @@ def test_roundtrip_tokenize(filename):
# Remove trailing commas on inline things
('set((1,))', '{1}'),
('set((1, ))', '{1}'),
+ # Remove trailing commas after things
+ ('set([1, 2, 3,],)', '{1, 2, 3}'),
+ ('set(x for x in y,)', '{x for x in y}'),
+ (
+ 'set(\n'
+ ' x for x in y,\n'
+ ')',
+ '{\n'
+ ' x for x in y\n'
+ '}',
+ ),
),
)
def test_sets(s, expected):
@@ -167,6 +178,25 @@ def test_sets(s, expected):
),
# This doesn't get fixed by autopep8 and can cause a syntax error
('dict((a, b)for a, b in y)', '{a: b for a, b in y}'),
+ # Need to remove trailing commas on the element
+ (
+ 'dict(\n'
+ ' (\n'
+ ' a,\n'
+ ' b,\n'
+ ' )\n'
+ ' for a, b in y\n'
+ ')',
+ # Ideally, this'll go through some other formatting tool before
+ # being committed. Shrugs!
+ '{\n'
+ ' \n'
+ ' a:\n'
+ ' b\n'
+ ' \n'
+ ' for a, b in y\n'
+ '}',
+ ),
),
)
def test_dictcomps(s, expected):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 1.0 | {
"env_vars": null,
"env_yml_path": [],
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [],
"python": "3.6",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
cfgv==3.3.1
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==5.0.4
identify==2.4.4
importlib-metadata==4.2.0
importlib-resources==5.2.3
iniconfig==1.1.1
mccabe==0.7.0
nodeenv==1.6.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
pre-commit==2.17.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
-e git+https://github.com/asottile/pyupgrade.git@47f2b57f4b8ea963617199885ff46e0af7e63e45#egg=pyupgrade
PyYAML==6.0.1
toml==0.10.2
tomli==1.2.3
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: pyupgrade
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- cfgv==3.3.1
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==5.0.4
- identify==2.4.4
- importlib-metadata==4.2.0
- importlib-resources==5.2.3
- iniconfig==1.1.1
- mccabe==0.7.0
- nodeenv==1.6.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- pre-commit==2.17.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- toml==0.10.2
- tomli==1.2.3
- typing-extensions==4.1.1
- virtualenv==20.16.2
- zipp==3.6.0
prefix: /opt/conda/envs/pyupgrade
| [
"tests/pyupgrade_test.py::test_sets[set(\\n",
"tests/pyupgrade_test.py::test_dictcomps[dict(\\n"
]
| []
| [
"tests/pyupgrade_test.py::test_roundtrip_text[]",
"tests/pyupgrade_test.py::test_roundtrip_text[foo]",
"tests/pyupgrade_test.py::test_roundtrip_text[{}]",
"tests/pyupgrade_test.py::test_roundtrip_text[{0}]",
"tests/pyupgrade_test.py::test_roundtrip_text[{named}]",
"tests/pyupgrade_test.py::test_roundtrip_text[{!r}]",
"tests/pyupgrade_test.py::test_roundtrip_text[{:>5}]",
"tests/pyupgrade_test.py::test_roundtrip_text[{{]",
"tests/pyupgrade_test.py::test_roundtrip_text[}}]",
"tests/pyupgrade_test.py::test_roundtrip_text[{0!s:15}]",
"tests/pyupgrade_test.py::test_intentionally_not_round_trip[{:}-{}]",
"tests/pyupgrade_test.py::test_intentionally_not_round_trip[{0:}-{0}]",
"tests/pyupgrade_test.py::test_intentionally_not_round_trip[{0!r:}-{0!r}]",
"tests/pyupgrade_test.py::test_tokenize_src_simple",
"tests/pyupgrade_test.py::test_roundtrip_tokenize[testing/resources/empty.py]",
"tests/pyupgrade_test.py::test_roundtrip_tokenize[testing/resources/unicode_snowman.py]",
"tests/pyupgrade_test.py::test_roundtrip_tokenize[testing/resources/backslash_continuation.py]",
"tests/pyupgrade_test.py::test_sets[set()-set()]",
"tests/pyupgrade_test.py::test_sets[set((\\n))-set((\\n))]",
"tests/pyupgrade_test.py::test_sets[set",
"tests/pyupgrade_test.py::test_sets[set(())-set()]",
"tests/pyupgrade_test.py::test_sets[set([])-set()]",
"tests/pyupgrade_test.py::test_sets[set((",
"tests/pyupgrade_test.py::test_sets[set((1,",
"tests/pyupgrade_test.py::test_sets[set([1,",
"tests/pyupgrade_test.py::test_sets[set(x",
"tests/pyupgrade_test.py::test_sets[set([x",
"tests/pyupgrade_test.py::test_sets[set((x",
"tests/pyupgrade_test.py::test_sets[set(((1,",
"tests/pyupgrade_test.py::test_sets[set((a,",
"tests/pyupgrade_test.py::test_sets[set([(1,",
"tests/pyupgrade_test.py::test_sets[set([((1,",
"tests/pyupgrade_test.py::test_sets[set((((1,",
"tests/pyupgrade_test.py::test_sets[set(\\n(1,",
"tests/pyupgrade_test.py::test_sets[set((\\n1,\\n2,\\n))\\n-{\\n1,\\n2,\\n}\\n]",
"tests/pyupgrade_test.py::test_sets[set((frozenset(set((1,",
"tests/pyupgrade_test.py::test_sets[set((1,))-{1}]",
"tests/pyupgrade_test.py::test_dictcomps[x",
"tests/pyupgrade_test.py::test_dictcomps[dict()-dict()]",
"tests/pyupgrade_test.py::test_dictcomps[(-(]",
"tests/pyupgrade_test.py::test_dictcomps[dict",
"tests/pyupgrade_test.py::test_dictcomps[dict((a,",
"tests/pyupgrade_test.py::test_dictcomps[dict([a,",
"tests/pyupgrade_test.py::test_dictcomps[dict(((a,",
"tests/pyupgrade_test.py::test_dictcomps[dict([(a,",
"tests/pyupgrade_test.py::test_dictcomps[dict(((a),",
"tests/pyupgrade_test.py::test_dictcomps[dict((k,",
"tests/pyupgrade_test.py::test_format_literals['{}'.format(1)-'{}'.format(1)]",
"tests/pyupgrade_test.py::test_format_literals['{'.format(1)-'{'.format(1)]",
"tests/pyupgrade_test.py::test_format_literals['}'.format(1)-'}'.format(1)]",
"tests/pyupgrade_test.py::test_format_literals[x",
"tests/pyupgrade_test.py::test_format_literals['{0}'.format(1)-'{}'.format(1)]",
"tests/pyupgrade_test.py::test_format_literals['''{0}\\n{1}\\n'''.format(1,",
"tests/pyupgrade_test.py::test_format_literals['{0}'",
"tests/pyupgrade_test.py::test_format_literals[print(\\n",
"tests/pyupgrade_test.py::test_imports_unicode_literals[import",
"tests/pyupgrade_test.py::test_imports_unicode_literals[from",
"tests/pyupgrade_test.py::test_imports_unicode_literals[x",
"tests/pyupgrade_test.py::test_imports_unicode_literals[\"\"\"docstring\"\"\"\\nfrom",
"tests/pyupgrade_test.py::test_unicode_literals[(-False-(]",
"tests/pyupgrade_test.py::test_unicode_literals[u''-False-u'']",
"tests/pyupgrade_test.py::test_unicode_literals[u''-True-'']",
"tests/pyupgrade_test.py::test_unicode_literals[from",
"tests/pyupgrade_test.py::test_unicode_literals[\"\"\"with",
"tests/pyupgrade_test.py::test_main_trivial",
"tests/pyupgrade_test.py::test_main_noop",
"tests/pyupgrade_test.py::test_main_changes_a_file",
"tests/pyupgrade_test.py::test_main_syntax_error",
"tests/pyupgrade_test.py::test_main_non_utf8_bytes",
"tests/pyupgrade_test.py::test_py3_only_argument_unicode_literals"
]
| []
| MIT License | 1,089 | [
"pyupgrade.py"
]
| [
"pyupgrade.py"
]
|
|
mkdocs__mkdocs-1162 | 571bbbfb91eba00136f2b4e2131f1f2fc759b015 | 2017-03-14 01:05:39 | 84906a7a6c936719539339b2f46658c1a561527f | diff --git a/docs/about/release-notes.md b/docs/about/release-notes.md
index ea3550fc..7a96769c 100644
--- a/docs/about/release-notes.md
+++ b/docs/about/release-notes.md
@@ -21,6 +21,14 @@ The current and past members of the MkDocs team.
* [@d0ugal](https://github.com/d0ugal/)
* [@waylan](https://github.com/waylan/)
+## Version 0.16.2 (2017-03-??)
+
+* System root (`/`) is not a valid path for site_dir or docs_dir (#1161)
+* Refactor readthedocs theme navigation (#1155 & #1156)
+* Add support to dev server to serve custom error pages (#1040)
+* Ensure nav.homepage.url is not blank on error pages (#1131)
+* Increase livereload dependency to 2.5.1 (#1106)
+
## Version 0.16.1 (2016-12-22)
* Ensure scrollspy behavior does not affect nav bar (#1094)
diff --git a/docs/user-guide/configuration.md b/docs/user-guide/configuration.md
index 76652fe0..40f45196 100644
--- a/docs/user-guide/configuration.md
+++ b/docs/user-guide/configuration.md
@@ -181,8 +181,8 @@ up.
Lets you set the directory containing the documentation source markdown files.
This can either be a relative directory, in which case it is resolved relative
-to the directory containing you configuration file, or it can be an absolute
-directory path.
+to the directory containing your configuration file, or it can be an absolute
+directory path from the root of your local file system.
**default**: `'docs'`
@@ -190,8 +190,8 @@ directory path.
Lets you set the directory where the output HTML and other files are created.
This can either be a relative directory, in which case it is resolved relative
-to the directory containing you configuration file, or it can be an absolute
-directory path.
+to the directory containing your configuration file, or it can be an absolute
+directory path from the root of your local file system.
**default**: `'site'`
diff --git a/mkdocs/config/config_options.py b/mkdocs/config/config_options.py
index 7fa3ac6f..d5e656fd 100644
--- a/mkdocs/config/config_options.py
+++ b/mkdocs/config/config_options.py
@@ -253,14 +253,14 @@ class SiteDir(Dir):
# Validate that the docs_dir and site_dir don't contain the
# other as this will lead to copying back and forth on each
# and eventually make a deep nested mess.
- if (config['docs_dir'] + os.sep).startswith(config['site_dir'] + os.sep):
+ if (config['docs_dir'] + os.sep).startswith(config['site_dir'].rstrip(os.sep) + os.sep):
raise ValidationError(
("The 'docs_dir' should not be within the 'site_dir' as this "
"can mean the source files are overwritten by the output or "
"it will be deleted if --clean is passed to mkdocs build."
"(site_dir: '{0}', docs_dir: '{1}')"
).format(config['site_dir'], config['docs_dir']))
- elif (config['site_dir'] + os.sep).startswith(config['docs_dir'] + os.sep):
+ elif (config['site_dir'] + os.sep).startswith(config['docs_dir'].rstrip(os.sep) + os.sep):
raise ValidationError(
("The 'site_dir' should not be within the 'docs_dir' as this "
"leads to the build directory being copied into itself and "
| Advice: Do not set site_dir to /
Just a warning to anyone who might want to change their project build directory to the root of the project, with a .yml file like this:
```yaml
site_name: My Docs
site_dir: /
pages:
- Home: index.md
theme: material
```
It won't do what you think it will. If you run ```mkdocs build --clean``` you will have it recursively delete everything it can from your system root folder until it runs into a file it can't delete without sudo. You will lose the entire contents of your home folder as well as your Ubuntu profile, and do unknown damage to your system.
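A hypothetical, minimal reproduction of why `/` slipped past the old containment check (the variable values are made up; the real check is the `SiteDir` validation shown in the diff above):
```python
import os

site_dir = '/'                          # hypothetical values for illustration
docs_dir = '/home/user/project/docs'

# old check: does docs_dir live inside site_dir?
# '/home/user/project/docs/'.startswith('//') -> False, so no error was raised
print((docs_dir + os.sep).startswith(site_dir + os.sep))

# fixed check strips the trailing separator from site_dir first:
# '/home/user/project/docs/'.startswith('/') -> True, so validation now fails early
print((docs_dir + os.sep).startswith(site_dir.rstrip(os.sep) + os.sep))
```
With the old comparison returning False, no validation error was raised and `mkdocs build --clean` treated `/` as a disposable build directory.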
Time to reformat. :D | mkdocs/mkdocs | diff --git a/mkdocs/tests/config/config_options_tests.py b/mkdocs/tests/config/config_options_tests.py
index 284dbdc6..041dd21b 100644
--- a/mkdocs/tests/config/config_options_tests.py
+++ b/mkdocs/tests/config/config_options_tests.py
@@ -203,11 +203,21 @@ class DirTest(unittest.TestCase):
class SiteDirTest(unittest.TestCase):
+ def validate_config(self, config):
+ """ Given a config with values for site_dir and doc_dir, run site_dir post_validation. """
+ site_dir = config_options.SiteDir()
+ docs_dir = config_options.Dir()
+
+ config['config_file_path'] = os.path.join(os.path.abspath('..'), 'mkdocs.yml')
+
+ config['docs_dir'] = docs_dir.validate(config['docs_dir'])
+ config['site_dir'] = site_dir.validate(config['site_dir'])
+ site_dir.post_validation(config, 'site_dir')
+ return True # No errors were raised
+
def test_doc_dir_in_site_dir(self):
j = os.path.join
- option = config_options.SiteDir()
- docs_dir = config_options.Dir()
# The parent dir is not the same on every system, so use the actual dir name
parent_dir = mkdocs.__file__.split(os.sep)[-3]
@@ -218,16 +228,12 @@ class SiteDirTest(unittest.TestCase):
{'docs_dir': 'docs', 'site_dir': ''},
{'docs_dir': '', 'site_dir': ''},
{'docs_dir': j('..', parent_dir, 'docs'), 'site_dir': 'docs'},
+ {'docs_dir': 'docs', 'site_dir': '/'}
)
for test_config in test_configs:
- test_config['config_file_path'] = j(os.path.abspath('..'), 'mkdocs.yml')
-
- test_config['docs_dir'] = docs_dir.validate(test_config['docs_dir'])
- test_config['site_dir'] = option.validate(test_config['site_dir'])
-
self.assertRaises(config_options.ValidationError,
- option.post_validation, test_config, 'site_dir')
+ self.validate_config, test_config)
def test_site_dir_in_docs_dir(self):
@@ -237,19 +243,23 @@ class SiteDirTest(unittest.TestCase):
{'docs_dir': 'docs', 'site_dir': j('docs', 'site')},
{'docs_dir': '.', 'site_dir': 'site'},
{'docs_dir': '', 'site_dir': 'site'},
+ {'docs_dir': '/', 'site_dir': 'site'},
)
for test_config in test_configs:
- test_config['config_file_path'] = j(os.path.abspath('..'), 'mkdocs.yml')
+ self.assertRaises(config_options.ValidationError,
+ self.validate_config, test_config)
- docs_dir = config_options.Dir()
- option = config_options.SiteDir()
+ def test_common_prefix(self):
+ """ Legitimate settings with common prefixes should not fail validation. """
- test_config['docs_dir'] = docs_dir.validate(test_config['docs_dir'])
- test_config['site_dir'] = option.validate(test_config['site_dir'])
+ test_configs = (
+ {'docs_dir': 'docs', 'site_dir': 'docs-site'},
+ {'docs_dir': 'site-docs', 'site_dir': 'site'},
+ )
- self.assertRaises(config_options.ValidationError,
- option.post_validation, test_config, 'site_dir')
+ for test_config in test_configs:
+ assert self.validate_config(test_config)
class ThemeTest(unittest.TestCase):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 3
} | 0.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/project.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==8.1.8
exceptiongroup==1.2.2
importlib_metadata==8.6.1
iniconfig==2.1.0
Jinja2==3.1.6
livereload==2.7.1
Markdown==3.7
MarkupSafe==3.0.2
-e git+https://github.com/mkdocs/mkdocs.git@571bbbfb91eba00136f2b4e2131f1f2fc759b015#egg=mkdocs
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
PyYAML==6.0.2
tomli==2.2.1
tornado==6.4.2
zipp==3.21.0
| name: mkdocs
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==8.1.8
- exceptiongroup==1.2.2
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- jinja2==3.1.6
- livereload==2.7.1
- markdown==3.7
- markupsafe==3.0.2
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- pyyaml==6.0.2
- tomli==2.2.1
- tornado==6.4.2
- zipp==3.21.0
prefix: /opt/conda/envs/mkdocs
| [
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_site_dir_in_docs_dir"
]
| []
| [
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_default",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_empty",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_replace_default",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_required",
"mkdocs/tests/config/config_options_tests.py::OptionallyRequiredTest::test_required_no_default",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_length",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_multiple_types",
"mkdocs/tests/config/config_options_tests.py::TypeTest::test_single_type",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_invalid",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_invalid_url",
"mkdocs/tests/config/config_options_tests.py::URLTest::test_valid_url",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_bitbucket",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_custom",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_edit_uri_github",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_bitbucket",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_custom",
"mkdocs/tests/config/config_options_tests.py::RepoURLTest::test_repo_name_github",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_doc_dir_is_config_dir",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_file",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_incorrect_type_attribute_error",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_incorrect_type_type_error",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_missing_dir",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_missing_dir_but_required",
"mkdocs/tests/config/config_options_tests.py::DirTest::test_valid_dir",
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_common_prefix",
"mkdocs/tests/config/config_options_tests.py::SiteDirTest::test_doc_dir_in_site_dir",
"mkdocs/tests/config/config_options_tests.py::ThemeTest::test_theme",
"mkdocs/tests/config/config_options_tests.py::ThemeTest::test_theme_invalid",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_empty",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_invalid",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::ExtrasTest::test_walk",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_invalid_config",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_invalid_type",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided_dict",
"mkdocs/tests/config/config_options_tests.py::PagesTest::test_provided_empty",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_invalid_pages",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_many_pages",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_one_page",
"mkdocs/tests/config/config_options_tests.py::NumPagesTest::test_provided",
"mkdocs/tests/config/config_options_tests.py::PrivateTest::test_defined",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_builtins",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_builtins_config",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_configkey",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_duplicates",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_config_item",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_config_option",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_invalid_dict_item",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_list_dicts",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_mixed_list",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_none",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_not_list",
"mkdocs/tests/config/config_options_tests.py::MarkdownExtensionsTest::test_simple_list"
]
| []
| BSD 2-Clause "Simplified" License | 1,090 | [
"docs/user-guide/configuration.md",
"docs/about/release-notes.md",
"mkdocs/config/config_options.py"
]
| [
"docs/user-guide/configuration.md",
"docs/about/release-notes.md",
"mkdocs/config/config_options.py"
]
|
|
oasis-open__cti-pattern-validator-10 | 82956eaff8caf5af323b0c712550bfc5840fbd62 | 2017-03-14 14:07:07 | 82956eaff8caf5af323b0c712550bfc5840fbd62 | diff --git a/.travis.yml b/.travis.yml
index 896f07d..52cda7e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,10 +1,12 @@
sudo: false
language: python
python:
+ - "2.6"
- "2.7"
- "3.3"
- "3.4"
- "3.5"
+ - "3.6"
install:
- pip install -U pip setuptools
- pip install tox-travis
diff --git a/stix2patterns/validator.py b/stix2patterns/validator.py
index b1563d9..79313c0 100644
--- a/stix2patterns/validator.py
+++ b/stix2patterns/validator.py
@@ -35,9 +35,15 @@ def run_validator(pattern):
returned in a list. The test passed if the returned list is empty.
'''
+ start = ''
if isinstance(pattern, six.string_types):
+ start = pattern[:2]
pattern = InputStream(pattern)
+ if not start:
+ start = pattern.readline()[:2]
+ pattern.seek(0)
+
parseErrListener = STIXPatternErrorListener()
lexer = STIXPatternLexer(pattern)
@@ -54,6 +60,11 @@ def run_validator(pattern):
parser.pattern()
+ # replace with easier-to-understand error message
+ if not (start[0] == '[' or start == '(['):
+ parseErrListener.err_strings[0] = "FAIL: Error found at line 1:0. " \
+ "input is missing square brackets"
+
return parseErrListener.err_strings
diff --git a/tox.ini b/tox.ini
index c40db2f..164fcf3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py27,py33,py34,py35,pycodestyle
+envlist = py26,py27,py33,py34,py35,py36,pycodestyle
[testenv]
deps = pytest
@@ -16,7 +16,9 @@ exclude=grammars
[travis]
python =
+ 2.6: py26
2.7: py27, pycodestyle
3.3: py33
3.4: py34
3.5: py35
+ 3.6: py36
| Unclear error message when missing brackets
If the input is missing the surrounding '[' and ']' (i.e., the input is just a Comparison Expression, not an Observation Expression or Pattern Expression), the validator's error message is unhelpful:
```
$ validate_patterns
Enter a pattern to validate: file-object:hashes.md5 = '79054025255fb1a26e4bc422aef54eb4'
FAIL: Error found at line 1:0. no viable alternative at input 'file-object'
```
This message comes from the ANTLR library so it can't easily be changed, but perhaps the validator could check if the input starts with '[' or '([' and print a more user-friendly error message? | oasis-open/cti-pattern-validator | diff --git a/stix2patterns/test/test_validator.py b/stix2patterns/test/test_validator.py
index 626b981..814517f 100644
--- a/stix2patterns/test/test_validator.py
+++ b/stix2patterns/test/test_validator.py
@@ -2,7 +2,6 @@
Test cases for stix2patterns/validator.py.
'''
import os
-
import pytest
from stix2patterns.validator import validate
@@ -22,21 +21,33 @@ def test_spec_patterns(test_input):
FAIL_CASES = [
- "file:size = 1280", # Does not use square brackets
- "[file:hashes.MD5 = cead3f77f6cda6ec00f57d76c9a6879f]" # No quotes around string
- "[file.size = 1280]", # Use period instead of colon
- "[file:name MATCHES /.*\\.dll/]", # Quotes around regular expression
- "[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN 5 HOURS", # SECONDS is the only valid time unit
+ ("file:size = 1280", # Does not use square brackets
+ "FAIL: Error found at line 1:0. input is missing square brackets"),
+ ("[file:size = ]", # Missing value
+ "FAIL: Error found at line 1:13. no viable alternative at input ']'"),
+ ("[file:hashes.MD5 = cead3f77f6cda6ec00f57d76c9a6879f]", # No quotes around string
+ "FAIL: Error found at line 1:19. no viable alternative at input 'cead3f77f6cda6ec00f57d76c9a6879f'"),
+ ("[file.size = 1280]", # Use period instead of colon
+ "FAIL: Error found at line 1:5. no viable alternative at input 'file.'"),
+ ("[file:name MATCHES /.*\\.dll/]", # Quotes around regular expression
+ "FAIL: Error found at line 1:19. mismatched input '/' expecting <INVALID>"),
+ ("[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN ]", # Missing Qualifier value
+ "FAIL: Error found at line 1:63. mismatched input ']' expecting {<INVALID>, <INVALID>}"),
+ ("[win-registry-key:key = 'hkey_local_machine\\\\foo\\\\bar'] WITHIN 5 HOURS]", # SECONDS is the only valid time unit
+ "FAIL: Error found at line 1:65. mismatched input 'HOURS' expecting <INVALID>"),
+ ("[network-traffic:dst_ref.value ISSUBSET ]", # Missing second Comparison operand
+ "FAIL: Error found at line 1:40. missing <INVALID> at ']'"),
# TODO: add more failing test cases.
]
[email protected]("test_input", FAIL_CASES)
-def test_fail_patterns(test_input):
[email protected]("test_input,test_output", FAIL_CASES)
+def test_fail_patterns(test_input, test_output):
'''
Validate that patterns fail as expected.
'''
- pass_test = validate(test_input, print_errs=True)
+ pass_test, errors = validate(test_input, ret_errs=True, print_errs=True)
+ assert errors[0] == test_output
assert pass_test is False
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 3
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | antlr4-python3-runtime==4.5.3
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
coverage==6.2
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-cov==4.0.0
six==1.17.0
-e git+https://github.com/oasis-open/cti-pattern-validator.git@82956eaff8caf5af323b0c712550bfc5840fbd62#egg=stix2_patterns
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tomli==1.2.3
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: cti-pattern-validator
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- antlr4-python3-runtime==4.5.3
- coverage==6.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- mccabe==0.7.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest-cov==4.0.0
- six==1.17.0
- tomli==1.2.3
prefix: /opt/conda/envs/cti-pattern-validator
| [
"stix2patterns/test/test_validator.py::test_fail_patterns[file:size"
]
| []
| [
"stix2patterns/test/test_validator.py::test_spec_patterns[[file:hashes.\"SHA-256\"",
"stix2patterns/test/test_validator.py::test_spec_patterns[[email-message:from_ref.value",
"stix2patterns/test/test_validator.py::test_spec_patterns[[file:hashes.MD5",
"stix2patterns/test/test_validator.py::test_spec_patterns[[user-account:account_type",
"stix2patterns/test/test_validator.py::test_spec_patterns[[artifact:mime_type",
"stix2patterns/test/test_validator.py::test_spec_patterns[[file:name",
"stix2patterns/test/test_validator.py::test_spec_patterns[[file:extensions.windows-pebinary-ext.sections[*].entropy",
"stix2patterns/test/test_validator.py::test_spec_patterns[[file:mime_type",
"stix2patterns/test/test_validator.py::test_spec_patterns[[network-traffic:dst_ref.type",
"stix2patterns/test/test_validator.py::test_spec_patterns[[domain-name:value",
"stix2patterns/test/test_validator.py::test_spec_patterns[[url:value",
"stix2patterns/test/test_validator.py::test_spec_patterns[[x509-certificate:issuer",
"stix2patterns/test/test_validator.py::test_spec_patterns[[windows-registry-key:key",
"stix2patterns/test/test_validator.py::test_spec_patterns[[(file:name",
"stix2patterns/test/test_validator.py::test_spec_patterns[[email-message:sender_ref.value",
"stix2patterns/test/test_validator.py::test_spec_patterns[[x-usb-device:usbdrive.serial_number",
"stix2patterns/test/test_validator.py::test_spec_patterns[[process:command_line",
"stix2patterns/test/test_validator.py::test_spec_patterns[[network-traffic:dst_ref.value",
"stix2patterns/test/test_validator.py::test_spec_patterns[([file:name",
"stix2patterns/test/test_validator.py::test_fail_patterns[[file:size",
"stix2patterns/test/test_validator.py::test_fail_patterns[[file:hashes.MD5",
"stix2patterns/test/test_validator.py::test_fail_patterns[[file.size",
"stix2patterns/test/test_validator.py::test_fail_patterns[[file:name",
"stix2patterns/test/test_validator.py::test_fail_patterns[[win-registry-key:key",
"stix2patterns/test/test_validator.py::test_fail_patterns[[network-traffic:dst_ref.value",
"stix2patterns/test/test_validator.py::test_pass_patterns[[file:size",
"stix2patterns/test/test_validator.py::test_pass_patterns[[file:file_name",
"stix2patterns/test/test_validator.py::test_pass_patterns[[file:extended_properties.ntfs-ext.sid",
"stix2patterns/test/test_validator.py::test_pass_patterns[[emailaddr:value",
"stix2patterns/test/test_validator.py::test_pass_patterns[[ipv4addr:value",
"stix2patterns/test/test_validator.py::test_pass_patterns[[user-account:value",
"stix2patterns/test/test_validator.py::test_pass_patterns[[file:file_system_properties.file_name",
"stix2patterns/test/test_validator.py::test_pass_patterns[[network-connection:extended_properties[0].source_payload",
"stix2patterns/test/test_validator.py::test_pass_patterns[[win-registry-key:key"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,091 | [
".travis.yml",
"tox.ini",
"stix2patterns/validator.py"
]
| [
".travis.yml",
"tox.ini",
"stix2patterns/validator.py"
]
|
|
bmcfee__pumpp-41 | 18d19fb188342683d19af1388c8c085092b3b0be | 2017-03-14 14:42:09 | 18d19fb188342683d19af1388c8c085092b3b0be | diff --git a/pumpp/core.py b/pumpp/core.py
index 445af3c..410b612 100644
--- a/pumpp/core.py
+++ b/pumpp/core.py
@@ -16,6 +16,7 @@ import jams
from .exceptions import ParameterError
from .task import BaseTaskTransformer
from .feature import FeatureExtractor
+from .sampler import Sampler
def transform(audio_f, jam, *ops):
@@ -131,3 +132,26 @@ class Pump(object):
'''
return transform(audio_f, jam, *self.ops)
+
+ def sampler(self, n_samples, duration):
+ '''Construct a sampler object for this pump's operators.
+
+ Parameters
+ ----------
+ n_samples : None or int > 0
+ The number of samples to generate
+
+ duration : int > 0
+ The duration (in frames) of each sample patch
+
+ Returns
+ -------
+ sampler : pumpp.Sampler
+ The sampler object
+
+ See Also
+ --------
+ pumpp.Sampler
+ '''
+
+ return Sampler(n_samples, duration, *self.ops)
| connect Pump to Sampler
Instead of having to say `Sampler(..., *P.ops)`, we should be able to just pull in a Pump object. | bmcfee/pumpp
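A minimal sketch of the intended usage, mirroring the patch above; the operator parameters are illustrative and borrowed from the tests below:
```python
import pumpp

# Build a pump from a couple of operators (parameters here are just examples).
ops = [pumpp.feature.STFT(name='stft', sr=22050, hop_length=512, n_fft=1024),
       pumpp.task.BeatTransformer(name='beat', sr=22050, hop_length=512)]
P = pumpp.Pump(*ops)

# Previously: S = pumpp.Sampler(10, 5, *P.ops)
# With the convenience method, the Pump hands its own operators to the Sampler:
S = P.sampler(10, 5)
```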
index 9833191..ea9c826 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -130,3 +130,23 @@ def test_pump_add(sr, hop_length):
def test_pump_add_bad():
pumpp.Pump('foo')
+
+
[email protected]('n_samples', [None, 10])
[email protected]('duration', [1, 5])
+def test_pump_sampler(sr, hop_length, n_samples, duration):
+ ops = [pumpp.feature.STFT(name='stft', sr=sr,
+ hop_length=hop_length,
+ n_fft=2*hop_length),
+
+ pumpp.task.BeatTransformer(name='beat', sr=sr,
+ hop_length=hop_length)]
+
+ P = pumpp.Pump(*ops)
+
+ S1 = pumpp.Sampler(n_samples, duration, *ops)
+ S2 = P.sampler(n_samples, duration)
+
+ assert S1._time == S2._time
+ assert S1.n_samples == S2.n_samples
+ assert S1.duration == S2.duration
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[docs,tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": null,
"pre_install": [
"apt-get update",
"apt-get install -y ffmpeg"
],
"python": "3.5",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | absl-py==0.15.0
alabaster==0.7.13
appdirs==1.4.4
astunparse==1.6.3
attrs==22.2.0
audioread==3.0.1
Babel==2.11.0
cached-property==1.5.2
cachetools==4.2.4
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
clang==5.0
coverage==6.2
dataclasses==0.8
decorator==5.1.1
docutils==0.18.1
flatbuffers==1.12
gast==0.4.0
google-auth==1.35.0
google-auth-oauthlib==0.4.6
google-pasta==0.2.0
grpcio==1.48.2
h5py==3.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
jams==0.3.4
Jinja2==3.0.3
joblib==1.1.1
jsonschema==3.2.0
keras==2.6.0
Keras-Preprocessing==1.1.2
librosa==0.9.2
llvmlite==0.36.0
Markdown==3.3.7
MarkupSafe==2.0.1
mir_eval==0.8.2
numba==0.53.1
numpy==1.19.5
numpydoc==1.1.0
oauthlib==3.2.2
opt-einsum==3.3.0
packaging==21.3
pandas==1.1.5
pluggy==1.0.0
pooch==1.6.0
protobuf==3.19.6
-e git+https://github.com/bmcfee/pumpp.git@18d19fb188342683d19af1388c8c085092b3b0be#egg=pumpp
py==1.11.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
requests-oauthlib==2.0.0
resampy==0.4.3
rsa==4.9
scikit-learn==0.24.2
scipy==1.5.4
six==1.15.0
snowballstemmer==2.2.0
sortedcontainers==2.4.0
soundfile==0.13.1
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tensorboard==2.6.0
tensorboard-data-server==0.6.1
tensorboard-plugin-wit==1.8.1
tensorflow==2.6.2
tensorflow-estimator==2.6.0
termcolor==1.1.0
threadpoolctl==3.1.0
tomli==1.2.3
typing-extensions==3.7.4.3
urllib3==1.26.20
Werkzeug==2.0.3
wrapt==1.12.1
zipp==3.6.0
| name: pumpp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- absl-py==0.15.0
- alabaster==0.7.13
- appdirs==1.4.4
- astunparse==1.6.3
- attrs==22.2.0
- audioread==3.0.1
- babel==2.11.0
- cached-property==1.5.2
- cachetools==4.2.4
- cffi==1.15.1
- charset-normalizer==2.0.12
- clang==5.0
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- docutils==0.18.1
- flatbuffers==1.12
- gast==0.4.0
- google-auth==1.35.0
- google-auth-oauthlib==0.4.6
- google-pasta==0.2.0
- grpcio==1.48.2
- h5py==3.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jams==0.3.4
- jinja2==3.0.3
- joblib==1.1.1
- jsonschema==3.2.0
- keras==2.6.0
- keras-preprocessing==1.1.2
- librosa==0.9.2
- llvmlite==0.36.0
- markdown==3.3.7
- markupsafe==2.0.1
- mir-eval==0.8.2
- numba==0.53.1
- numpy==1.19.5
- numpydoc==1.1.0
- oauthlib==3.2.2
- opt-einsum==3.3.0
- packaging==21.3
- pandas==1.1.5
- pluggy==1.0.0
- pooch==1.6.0
- protobuf==3.19.6
- py==1.11.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- requests-oauthlib==2.0.0
- resampy==0.4.3
- rsa==4.9
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.15.0
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- soundfile==0.13.1
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tensorboard==2.6.0
- tensorboard-data-server==0.6.1
- tensorboard-plugin-wit==1.8.1
- tensorflow==2.6.2
- tensorflow-estimator==2.6.0
- termcolor==1.1.0
- threadpoolctl==3.1.0
- tomli==1.2.3
- typing-extensions==3.7.4.3
- urllib3==1.26.20
- werkzeug==2.0.3
- wrapt==1.12.1
- zipp==3.6.0
prefix: /opt/conda/envs/pumpp
| [
"tests/test_core.py::test_pump_sampler[11025-128-1-None]",
"tests/test_core.py::test_pump_sampler[11025-128-1-10]",
"tests/test_core.py::test_pump_sampler[11025-128-5-None]",
"tests/test_core.py::test_pump_sampler[11025-128-5-10]",
"tests/test_core.py::test_pump_sampler[11025-512-1-None]",
"tests/test_core.py::test_pump_sampler[11025-512-1-10]",
"tests/test_core.py::test_pump_sampler[11025-512-5-None]",
"tests/test_core.py::test_pump_sampler[11025-512-5-10]",
"tests/test_core.py::test_pump_sampler[22050-128-1-None]",
"tests/test_core.py::test_pump_sampler[22050-128-1-10]",
"tests/test_core.py::test_pump_sampler[22050-128-5-None]",
"tests/test_core.py::test_pump_sampler[22050-128-5-10]",
"tests/test_core.py::test_pump_sampler[22050-512-1-None]",
"tests/test_core.py::test_pump_sampler[22050-512-1-10]",
"tests/test_core.py::test_pump_sampler[22050-512-5-None]",
"tests/test_core.py::test_pump_sampler[22050-512-5-10]"
]
| [
"tests/test_core.py::test_transform[None-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[None-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[None-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[None-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-22050-512-tests/data/test.ogg]"
]
| [
"tests/test_core.py::test_pump_empty[None-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[None-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[None-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[None-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_add[11025-128]",
"tests/test_core.py::test_pump_add[11025-512]",
"tests/test_core.py::test_pump_add[22050-128]",
"tests/test_core.py::test_pump_add[22050-512]"
]
| []
| ISC License | 1,092 | [
"pumpp/core.py"
]
| [
"pumpp/core.py"
]
|
|
networkx__networkx-2386 | 464bf8fc08ffa09cfd0183fb5cae1adfe6839e12 | 2017-03-14 15:36:14 | 3f4fd85765bf2d88188cfd4c84d0707152e6cd1e | diff --git a/networkx/algorithms/bipartite/matching.py b/networkx/algorithms/bipartite/matching.py
index 85f40a6eb..20a26f222 100644
--- a/networkx/algorithms/bipartite/matching.py
+++ b/networkx/algorithms/bipartite/matching.py
@@ -300,7 +300,8 @@ def eppstein_matching(G, top_nodes=None):
recurse(v)
-def _is_connected_by_alternating_path(G, v, matching, targets):
+def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges,
+ targets):
"""Returns True if and only if the vertex `v` is connected to one of
the target vertices by an alternating path in `G`.
@@ -314,65 +315,56 @@ def _is_connected_by_alternating_path(G, v, matching, targets):
`v` is a vertex in `G`.
- `matching` is a dictionary representing a maximum matching in `G`, as
- returned by, for example, :func:`maximum_matching`.
+ `matched_edges` is a set of edges present in a maximum matching in `G`.
+
+ `unmatched_edges` is a set of edges not present in a maximum
+ matching in `G`.
`targets` is a set of vertices.
"""
- # Get the set of matched edges and the set of unmatched edges. Only include
- # one version of each undirected edge (for example, include edge (1, 2) but
- # not edge (2, 1)).
- matched_edges = {(u, v) for u, v in matching.items() if u <= v}
- unmatched_edges = set(G.edges()) - matched_edges
-
- def _alternating_dfs(u, depth, along_matched=True):
+ def _alternating_dfs(u, along_matched=True):
"""Returns True if and only if `u` is connected to one of the
targets by an alternating path.
`u` is a vertex in the graph `G`.
- `depth` specifies the maximum recursion depth of the depth-first
- search.
-
If `along_matched` is True, this step of the depth-first search
will continue only through edges in the given matching. Otherwise, it
will continue only through edges *not* in the given matching.
"""
- # Base case 1: u is one of the target vertices. `u` is connected to one
- # of the target vertices by an alternating path of length zero.
- if u in targets:
- return True
- # Base case 2: we have exceeded are allowed depth. In this case, we
- # have looked at a path of length `n`, so looking any further won't
- # help.
- if depth < 0:
- return False
- # Determine which set of edges to look across.
- valid_edges = matched_edges if along_matched else unmatched_edges
- for v in G[u]:
- # Consider only those neighbors connected via a valid edge.
- if (u, v) in valid_edges or (v, u) in valid_edges:
- # Recursively perform a depth-first search starting from the
- # neighbor. Decrement the depth limit and switch which set of
- # vertices will be valid for next time.
- return _alternating_dfs(v, depth - 1, not along_matched)
- # If there are no more vertices to look through and we haven't yet
- # found a target vertex, simply say that no path exists.
+ if along_matched:
+ edges = itertools.cycle([matched_edges, unmatched_edges])
+ else:
+ edges = itertools.cycle([unmatched_edges, matched_edges])
+ visited = set()
+ stack = [(u, iter(G[u]), next(edges))]
+ while stack:
+ parent, children, valid_edges = stack[-1]
+ try:
+ child = next(children)
+ if child not in visited:
+ if ((parent, child) in valid_edges
+ or (child, parent) in valid_edges):
+ if child in targets:
+ return True
+ visited.add(child)
+ stack.append((child, iter(G[child]), next(edges)))
+ except StopIteration:
+ stack.pop()
return False
# Check for alternating paths starting with edges in the matching, then
# check for alternating paths starting with edges not in the
- # matching. Initiate the depth-first search with the current depth equal to
- # the number of nodes in the graph.
- return (_alternating_dfs(v, len(G), along_matched=True) or
- _alternating_dfs(v, len(G), along_matched=False))
+ # matching.
+ return (_alternating_dfs(v, along_matched=True) or
+ _alternating_dfs(v, along_matched=False))
def _connected_by_alternating_paths(G, matching, targets):
"""Returns the set of vertices that are connected to one of the target
- vertices by an alternating path in `G`.
+ vertices by an alternating path in `G` or are themselves a target.
An *alternating path* is a path in which every other edge is in the
specified maximum matching (and the remaining edges in the path are not in
@@ -388,9 +380,18 @@ def _connected_by_alternating_paths(G, matching, targets):
`targets` is a set of vertices.
"""
- # TODO This can be parallelized.
- return {v for v in G if _is_connected_by_alternating_path(G, v, matching,
- targets)}
+ # Get the set of matched edges and the set of unmatched edges. Only include
+ # one version of each undirected edge (for example, include edge (1, 2) but
+ # not edge (2, 1)). Using frozensets as an intermediary step we do not
+ # require nodes to be orderable.
+ edge_sets = {frozenset((u, v)) for u, v in matching.items()}
+ matched_edges = {tuple(edge) for edge in edge_sets}
+ unmatched_edges = {(u, v) for (u, v) in G.edges()
+ if frozenset((u, v)) not in edge_sets}
+
+ return {v for v in G if v in targets or
+ _is_connected_by_alternating_path(G, v, matched_edges,
+ unmatched_edges, targets)}
def to_vertex_cover(G, matching, top_nodes=None):
| bipartite.to_vertex_cover() gives incorrect results
This program:
```python
import networkx as nx
print('Using version ' + nx.__version__)
G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
print('Edges: ' + repr(list(G.edges())))
assert nx.is_bipartite(G)
assert nx.is_connected(G)
matching = nx.bipartite.maximum_matching(G)
print('Matching: ' + repr(matching))
for u, v in matching.items():
assert matching[v] == u
vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
print('Vertex cover: ' + repr(vertex_cover))
for u, v in G.edges():
assert u in vertex_cover or v in vertex_cover
```
produces:
```
$ python2.7 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (1, 3), (1, 4), (2, 3)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: set([0, 1])
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
$ python3.6 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (3, 1), (3, 2), (1, 4)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: {0, 1}
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
```
The vertex cover is incorrect, as verified by the `assert`s.
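For reference, a size-2 cover does exist here: by König's theorem the minimum vertex cover of a bipartite graph has the same size as a maximum matching, and the matching above has size 2. One valid cover is, for example:
```python
import networkx as nx

G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
# Vertex 3 covers (0, 3), (1, 3), (2, 3); vertex 1 covers (1, 4).
expected_cover = {1, 3}
assert all(u in expected_cover or v in expected_cover for u, v in G.edges())
```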
Current git version (21114082) fails in the same way. | networkx/networkx | diff --git a/networkx/algorithms/bipartite/tests/test_matching.py b/networkx/algorithms/bipartite/tests/test_matching.py
index c30582a19..22944c2c1 100644
--- a/networkx/algorithms/bipartite/tests/test_matching.py
+++ b/networkx/algorithms/bipartite/tests/test_matching.py
@@ -166,6 +166,25 @@ class TestMatching():
independent_set = set(G) - {v for _, v in vertex_cover}
assert_equal({'B', 'D', 'F', 'I', 'H'}, independent_set)
+ def test_vertex_cover_issue_2384(self):
+ G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
+ matching = maximum_matching(G)
+ vertex_cover = to_vertex_cover(G, matching)
+ for u, v in G.edges():
+ assert_true(u in vertex_cover or v in vertex_cover)
+
+ def test_unorderable_nodes(self):
+ a = object()
+ b = object()
+ c = object()
+ d = object()
+ e = object()
+ G = nx.Graph([(a, d), (b, d), (b, e), (c, d)])
+ matching = maximum_matching(G)
+ vertex_cover = to_vertex_cover(G, matching)
+ for u, v in G.edges():
+ assert_true(u in vertex_cover or v in vertex_cover)
+
def test_eppstein_matching():
"""Test in accordance to issue #1927"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | help | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libgdal-dev graphviz"
],
"python": "3.6",
"reqs_path": [
"requirements/default.txt",
"requirements/test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
decorator==5.1.1
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/networkx/networkx.git@464bf8fc08ffa09cfd0183fb5cae1adfe6839e12#egg=networkx
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: networkx
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- decorator==5.1.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/networkx
| [
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_vertex_cover_issue_2384",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_unorderable_nodes"
]
| []
| [
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_eppstein_matching",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_hopcroft_karp_matching",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_to_vertex_cover",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_eppstein_matching_simple",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_hopcroft_karp_matching_simple",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_eppstein_matching_disconnected",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_hopcroft_karp_matching_disconnected",
"networkx/algorithms/bipartite/tests/test_matching.py::TestMatching::test_issue_2127",
"networkx/algorithms/bipartite/tests/test_matching.py::test_eppstein_matching"
]
| []
| BSD 3-Clause | 1,093 | [
"networkx/algorithms/bipartite/matching.py"
]
| [
"networkx/algorithms/bipartite/matching.py"
]
|
|
dask__dask-2084 | 0c39da3493891891830ab9e8eb5d5d8db203f826 | 2017-03-14 19:55:45 | bdb021c7dcd94ae1fa51c82fae6cf4cf7319aa14 | mrocklin: +1 from me | diff --git a/dask/delayed.py b/dask/delayed.py
index d54d9b941..a9cbf6360 100644
--- a/dask/delayed.py
+++ b/dask/delayed.py
@@ -240,9 +240,29 @@ def delayed(obj, name=None, pure=False, nout=None, traverse=True):
>>> res.compute() # doctest: +SKIP
AttributeError("'list' object has no attribute 'not_a_real_method'")
- Methods are assumed to be impure by default, meaning that subsequent calls
- may return different results. To assume purity, set `pure=True`. This
- allows sharing of any intermediate values.
+ "Magic" methods (e.g. operators and attribute access) are assumed to be
+ pure, meaning that subsequent calls must return the same results. This is
+ not overrideable. To invoke an impure attribute or operator, you'd need to
+ use it in a delayed function with ``pure=False``.
+
+ >>> class Incrementer(object):
+ ... def __init__(self):
+ ... self._n = 0
+ ... @property
+ ... def n(self):
+ ... self._n += 1
+ ... return self._n
+ ...
+ >>> x = delayed(Incrementer())
+ >>> x.n.key == x.n.key
+ True
+ >>> get_n = delayed(lambda x: x.n, pure=False)
+ >>> get_n(x).key == get_n(x).key
+ False
+
+ In contrast, methods are assumed to be impure by default, meaning that
+ subsequent calls may return different results. To assume purity, set
+ `pure=True`. This allows sharing of any intermediate values.
>>> a.count(2, pure=True).key == a.count(2, pure=True).key
True
@@ -252,7 +272,6 @@ def delayed(obj, name=None, pure=False, nout=None, traverse=True):
>>> a.count(2, dask_key_name="count_2")
Delayed("count_2")
-
"""
if isinstance(obj, Delayed):
return obj
@@ -346,7 +365,7 @@ class Delayed(base.Base):
def __getattr__(self, attr):
if attr.startswith('_'):
raise AttributeError("Attribute {0} not found".format(attr))
- return DelayedAttr(self, attr, 'getattr-%s' % tokenize(self, attr))
+ return DelayedAttr(self, attr)
def __setattr__(self, attr, val):
if attr in self.__slots__:
@@ -436,10 +455,10 @@ class DelayedLeaf(Delayed):
class DelayedAttr(Delayed):
__slots__ = ('_obj', '_attr', '_key')
- def __init__(self, obj, attr, key):
+ def __init__(self, obj, attr):
self._obj = obj
self._attr = attr
- self._key = key
+ self._key = 'getattr-%s' % tokenize(obj, attr, pure=True)
@property
def dask(self):
| Inheritance of Purity
(As discussed here: http://stackoverflow.com/questions/42773134/inheritance-of-purity/42773643)
I have a question about inheritance of function purity. For example, consider this case:
```python
In [1]: from dask import delayed
In [2]: myArr = delayed(np.ones, pure=True)((10,10))
In [3]: myArr
Out[3]: Delayed('ones-850ff41a84b309775d01d5f3e5c4d1c4')
In [4]: myArr = delayed(np.ones, pure=True)((10,10))
In [5]: myArr
Out[5]: Delayed('ones-850ff41a84b309775d01d5f3e5c4d1c4')
In [6]: myArr.shape
Out[6]: Delayed('getattr-de14f312-2605-4fc4-a419-c376520f73b4')
In [7]: myArr.shape
Out[7]: Delayed('getattr-9e547a62-0bc6-43e5-8d3c-cac532b73511')
In [8]: delayed(getattr, pure=True)(myArr, 'shape')
Out[8]: Delayed('getattr-5224be928bad33c9778d4f96d610cc37')
In [9]: delayed(getattr, pure=True)(myArr, 'shape')
Out[9]: Delayed('getattr-5224be928bad33c9778d4f96d610cc37')
```
I would expect lines [6] and [7] to yield the same key, since they are accessing an attribute from a delayed instance declared pure. However, that is not the case, and I must explicitly declare the `getattr` call to be pure for this to work, as seen in lines [8] and [9].
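For illustration, the expected behaviour amounts to deriving the `getattr` key from the parent's key and the attribute name rather than from a fresh uuid on every access — a rough sketch only, not dask's actual internals:
```python
from dask.base import tokenize

def attr_key(parent_key, attr):
    # Hashing (parent key, attribute name) gives the same key on every access,
    # so repeated .shape lookups on a pure Delayed collapse to one task.
    return 'getattr-' + tokenize(parent_key, attr)

assert (attr_key('ones-850ff41a84b309775d01d5f3e5c4d1c4', 'shape') ==
        attr_key('ones-850ff41a84b309775d01d5f3e5c4d1c4', 'shape'))
```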
As discussed in the stackoverflow link, this seems to be purely an issue of `dask` semantics. I would be inclined to vote for purity to be inherited. But I am a new user, so I would appreciate feedback/comments on this. Thanks!
PS: The eventual use case is for dask distributed. It seems that the way purity is treated in this case may possibly be different than the case listed here. | dask/dask | diff --git a/dask/tests/test_delayed.py b/dask/tests/test_delayed.py
index 11f00a428..bfea68244 100644
--- a/dask/tests/test_delayed.py
+++ b/dask/tests/test_delayed.py
@@ -69,6 +69,7 @@ def test_methods():
def test_attributes():
a = delayed(2 + 1j)
+ assert a.real._key == a.real._key
assert a.real.compute() == 2
assert a.imag.compute() == 1
assert (a.real + a.imag).compute() == 3
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 1.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"flake8",
"pandas_datareader",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.1.2
aiohttp==3.8.6
aioitertools==0.11.0
aiosignal==1.2.0
async-timeout==4.0.2
asynctest==0.13.0
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
botocore==1.23.24
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
cloudpickle==2.2.1
-e git+https://github.com/dask/dask.git@0c39da3493891891830ab9e8eb5d5d8db203f826#egg=dask
distributed==1.16.1
execnet==1.9.0
flake8==5.0.4
frozenlist==1.2.0
fsspec==2022.1.0
HeapDict==1.0.1
idna==3.10
idna-ssl==1.1.0
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jmespath==0.10.0
locket==1.0.0
lxml==5.3.1
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
msgpack-python==0.5.6
multidict==5.2.0
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
pandas-datareader==0.10.0
partd==1.2.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
s3fs==2022.1.0
six==1.17.0
sortedcollections==2.1.0
sortedcontainers==2.4.0
tblib==1.7.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
toolz==0.12.0
tornado==6.1
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
wrapt==1.16.0
yarl==1.7.2
zict==2.1.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.1.2
- aiohttp==3.8.6
- aioitertools==0.11.0
- aiosignal==1.2.0
- async-timeout==4.0.2
- asynctest==0.13.0
- botocore==1.23.24
- charset-normalizer==2.0.12
- click==8.0.4
- cloudpickle==2.2.1
- distributed==1.16.1
- execnet==1.9.0
- flake8==5.0.4
- frozenlist==1.2.0
- fsspec==2022.1.0
- heapdict==1.0.1
- idna==3.10
- idna-ssl==1.1.0
- importlib-metadata==4.2.0
- jmespath==0.10.0
- locket==1.0.0
- lxml==5.3.1
- mccabe==0.7.0
- msgpack-python==0.5.6
- multidict==5.2.0
- numpy==1.19.5
- pandas==1.1.5
- pandas-datareader==0.10.0
- partd==1.2.0
- psutil==7.0.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- s3fs==2022.1.0
- six==1.17.0
- sortedcollections==2.1.0
- sortedcontainers==2.4.0
- tblib==1.7.0
- toolz==0.12.0
- tornado==6.1
- urllib3==1.26.20
- wrapt==1.16.0
- yarl==1.7.2
- zict==2.1.0
prefix: /opt/conda/envs/dask
| [
"dask/tests/test_delayed.py::test_attributes"
]
| []
| [
"dask/tests/test_delayed.py::test_to_task_dask",
"dask/tests/test_delayed.py::test_delayed",
"dask/tests/test_delayed.py::test_operators",
"dask/tests/test_delayed.py::test_methods",
"dask/tests/test_delayed.py::test_method_getattr_optimize",
"dask/tests/test_delayed.py::test_delayed_errors",
"dask/tests/test_delayed.py::test_common_subexpressions",
"dask/tests/test_delayed.py::test_lists",
"dask/tests/test_delayed.py::test_literates",
"dask/tests/test_delayed.py::test_literates_keys",
"dask/tests/test_delayed.py::test_lists_are_concrete",
"dask/tests/test_delayed.py::test_traverse_false",
"dask/tests/test_delayed.py::test_pure",
"dask/tests/test_delayed.py::test_nout",
"dask/tests/test_delayed.py::test_kwargs",
"dask/tests/test_delayed.py::test_array_delayed",
"dask/tests/test_delayed.py::test_array_bag_delayed",
"dask/tests/test_delayed.py::test_delayed_picklable",
"dask/tests/test_delayed.py::test_delayed_compute_forward_kwargs",
"dask/tests/test_delayed.py::test_delayed_method_descriptor",
"dask/tests/test_delayed.py::test_delayed_callable",
"dask/tests/test_delayed.py::test_delayed_name_on_call",
"dask/tests/test_delayed.py::test_callable_obj",
"dask/tests/test_delayed.py::test_name_consitent_across_instances",
"dask/tests/test_delayed.py::test_sensitive_to_partials",
"dask/tests/test_delayed.py::test_delayed_name",
"dask/tests/test_delayed.py::test_finalize_name"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,094 | [
"dask/delayed.py"
]
| [
"dask/delayed.py"
]
|
google__mobly-146 | 3bf63583dae310bd2a7a147bbff01b6aa917f72e | 2017-03-14 22:56:44 | 777e2f766959889d12108024be0070fd0939dd6a | diff --git a/mobly/controllers/android_device.py b/mobly/controllers/android_device.py
index 79d3f16..5675e34 100644
--- a/mobly/controllers/android_device.py
+++ b/mobly/controllers/android_device.py
@@ -16,6 +16,7 @@
from builtins import str
from builtins import open
+from past.builtins import basestring
import contextlib
import logging
@@ -78,17 +79,19 @@ def create(configs):
ads = get_all_instances()
elif not isinstance(configs, list):
raise Error(ANDROID_DEVICE_NOT_LIST_CONFIG_MSG)
- elif isinstance(configs[0], str):
- # Configs is a list of serials.
- ads = get_instances(configs)
- else:
+ elif isinstance(configs[0], dict):
# Configs is a list of dicts.
ads = get_instances_with_configs(configs)
+ elif isinstance(configs[0], basestring):
+ # Configs is a list of strings representing serials.
+ ads = get_instances(configs)
+ else:
+ raise Error("No valid config found in: %s" % configs)
connected_ads = list_adb_devices()
for ad in ads:
if ad.serial not in connected_ads:
- raise DeviceError(ad, 'Android device is specified in config but '
+ raise DeviceError(ad, 'Android device is specified in config but'
' is not attached.')
_start_services_on_ads(ads)
return ads
diff --git a/mobly/controllers/android_device_lib/jsonrpc_client_base.py b/mobly/controllers/android_device_lib/jsonrpc_client_base.py
index 187ec1c..08237fb 100644
--- a/mobly/controllers/android_device_lib/jsonrpc_client_base.py
+++ b/mobly/controllers/android_device_lib/jsonrpc_client_base.py
@@ -188,7 +188,7 @@ class JsonRpcClientBase(object):
for _ in range(wait_time):
time.sleep(1)
if self._is_app_running():
- self.log.debug('Successfully started %s', self.app_name)
+ self._log.debug('Successfully started %s', self.app_name)
return
raise AppStartError('%s failed to start on %s.' %
(self.app_name, self._adb.serial))
| controllers/android_device.py throws an exception on android configuration with just string
It seems like an earlier checkin that switched to using base64 string to load configuration might have introduced a bug in mobly/controllers/android_device.create(configs). I was hitting an issue where I specified the android device serial as a string in my configuration file, but it failed the isinstance check against str, so create() instead tried to interpret my android config as a dict and failed. Changing the isinstance check to use basestring instead of str seems to have solved the problem.
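A simplified sketch of the corrected dispatch (adapted from the patch above; no adb interaction, and the serial values are placeholders):
```python
from past.builtins import basestring

def classify_configs(configs):
    if not isinstance(configs, list) or not configs:
        raise ValueError('configs must be a non-empty list')
    if isinstance(configs[0], dict):
        return 'list of dicts'            # e.g. [{'serial': 'ABC123'}]
    if isinstance(configs[0], basestring):
        return 'list of serial strings'   # e.g. ['ABC123']; matches str and unicode
    raise ValueError('No valid config found in: %s' % configs)
```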
Here is the snippet of the exception thrown.
mobly/controllers/android_device.py", line 81, in create
mobly/controllers/android_device.py", line 219, in get_instances_with_configs
MoblyTest [ln] serial = c.pop('serial')
MoblyTest [ln] AttributeError: 'str' object has no attribute 'pop' | google/mobly | diff --git a/tests/lib/mock_android_device.py b/tests/lib/mock_android_device.py
index 5fd6fac..03b62a1 100755
--- a/tests/lib/mock_android_device.py
+++ b/tests/lib/mock_android_device.py
@@ -33,7 +33,7 @@ def get_mock_ads(num):
"""
ads = []
for i in range(num):
- ad = mock.MagicMock(name="AndroidDevice", serial=i, h_port=None)
+ ad = mock.MagicMock(name="AndroidDevice", serial=str(i), h_port=None)
ads.append(ad)
return ads
@@ -42,6 +42,18 @@ def get_all_instances():
return get_mock_ads(5)
+def get_instances(serials):
+ ads = []
+ for serial in serials:
+ ad = mock.MagicMock(name="AndroidDevice", serial=serial, h_port=None)
+ ads.append(ad)
+ return ads
+
+
+def get_instances_with_configs(dicts):
+ return get_instances([d['serial'] for d in dicts])
+
+
def list_adb_devices():
return [ad.serial for ad in get_mock_ads(5)]
diff --git a/tests/mobly/controllers/android_device_test.py b/tests/mobly/controllers/android_device_test.py
index 38247cb..c5b3733 100755
--- a/tests/mobly/controllers/android_device_test.py
+++ b/tests/mobly/controllers/android_device_test.py
@@ -79,6 +79,29 @@ class AndroidDeviceTest(unittest.TestCase):
for actual, expected in zip(actual_ads, mock_android_device.get_mock_ads(5)):
self.assertEqual(actual.serial, expected.serial)
+ @mock.patch.object(android_device,
+ "get_instances",
+ new=mock_android_device.get_instances)
+ @mock.patch.object(android_device,
+ "list_adb_devices",
+ new=mock_android_device.list_adb_devices)
+ def test_create_with_string_list(self):
+ string_list = [u'1', '2']
+ actual_ads = android_device.create(string_list)
+ for actual_ad, expected_serial in zip(actual_ads, ['1', '2']):
+ self.assertEqual(actual_ad.serial, expected_serial)
+
+ @mock.patch.object(android_device,
+ "get_instances_with_configs",
+ new=mock_android_device.get_instances_with_configs)
+ @mock.patch.object(android_device,
+ "list_adb_devices",
+ new=mock_android_device.list_adb_devices)
+ def test_create_with_dict_list(self):
+ string_list = [{'serial': '1'}, {'serial': '2'}]
+ actual_ads = android_device.create(string_list)
+ for actual_ad, expected_serial in zip(actual_ads, ['1', '2']):
+ self.assertEqual(actual_ad.serial, expected_serial)
def test_create_with_empty_config(self):
expected_msg = android_device.ANDROID_DEVICE_EMPTY_CONFIG_MSG
with self.assertRaisesRegexp(android_device.Error,
@@ -91,15 +114,21 @@ class AndroidDeviceTest(unittest.TestCase):
expected_msg):
android_device.create("HAHA")
+ def test_create_with_no_valid_config(self):
+ expected_msg = "No valid config found in: .*"
+ with self.assertRaisesRegexp(android_device.Error,
+ expected_msg):
+ android_device.create([1])
+
def test_get_device_success_with_serial(self):
ads = mock_android_device.get_mock_ads(5)
- expected_serial = 0
+ expected_serial = '0'
ad = android_device.get_device(ads, serial=expected_serial)
self.assertEqual(ad.serial, expected_serial)
def test_get_device_success_with_serial_and_extra_field(self):
ads = mock_android_device.get_mock_ads(5)
- expected_serial = 1
+ expected_serial = '1'
expected_h_port = 5555
ads[1].h_port = expected_h_port
ad = android_device.get_device(ads,
@@ -119,7 +148,7 @@ class AndroidDeviceTest(unittest.TestCase):
def test_get_device_too_many_matches(self):
ads = mock_android_device.get_mock_ads(5)
target_serial = ads[1].serial = ads[0].serial
- expected_msg = "More than one device matched: \[0, 0\]"
+ expected_msg = "More than one device matched: \['0', '0'\]"
with self.assertRaisesRegexp(android_device.Error,
expected_msg):
android_device.get_device(ads, serial=target_serial)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
future==1.0.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/google/mobly.git@3bf63583dae310bd2a7a147bbff01b6aa917f72e#egg=mobly
mock==1.0.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytz==2025.2
PyYAML==6.0.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- future==1.0.0
- mock==1.0.1
- pytz==2025.2
- pyyaml==6.0.2
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_no_valid_config"
]
| []
| [
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_build_info",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_cat_adb_log",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_debug_tag",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_instantiation",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_attribute_name",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_package",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_snippet_name",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_snippet_cleanup",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report_fail",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report_fallback",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_logcat",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_logcat_with_user_param",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_dict_list",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_empty_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_not_list_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_pickup_all",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_string_list",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_no_match",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_success_with_serial",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_success_with_serial_and_extra_field",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_too_many_matches",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_start_services_on_ads"
]
| []
| Apache License 2.0 | 1,095 | [
"mobly/controllers/android_device.py",
"mobly/controllers/android_device_lib/jsonrpc_client_base.py"
]
| [
"mobly/controllers/android_device.py",
"mobly/controllers/android_device_lib/jsonrpc_client_base.py"
]
|
|
wireservice__csvkit-809 | 53f4a2df4992e04796e411c6a23e005a92205c2d | 2017-03-14 23:20:48 | e88daad61ed949edf11dfbf377eb347a9b969d47 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index a05c7b8..46b328c 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -8,6 +8,7 @@ Improvements:
* Add a :code:`--locale` option to set the locale of any formatted numbers.
* Add a :code:`--date-format` option to set a strptime date format string.
* Add a :code:`--datetime-format` option to set a strptime datetime format string.
+* Make :code:`--blanks` a common argument across all tools.
* :code:`-I` is the short option for :code:`--no-inference`.
* :doc:`/scripts/csvclean`, :doc:`/scripts/csvjson`, :doc:`/scripts/csvpy` support :code:`--no-header-row`.
* :doc:`/scripts/csvclean` is faster and no longer requires exponential time in the worst case.
diff --git a/csvkit/cli.py b/csvkit/cli.py
index 5e5a092..23a8ce7 100644
--- a/csvkit/cli.py
+++ b/csvkit/cli.py
@@ -167,6 +167,9 @@ class CSVKitUtility(object):
if 'S' not in self.override_flags:
self.argparser.add_argument('-S', '--skipinitialspace', dest='skipinitialspace', action='store_true',
help='Ignore whitespace immediately following the delimiter.')
+ if 'blanks' not in self.override_flags:
+ self.argparser.add_argument('--blanks', dest='blanks', action='store_true',
+ help='Do not convert "", "na", "n/a", "none", "null", "." to NULL.')
if 'date-format' not in self.override_flags:
self.argparser.add_argument('--date-format', dest='date_format',
help='Specify a strptime date format string like "%%m/%%d/%%Y".')
@@ -194,7 +197,7 @@ class CSVKitUtility(object):
help='When interpreting or displaying column numbers, use zero-based numbering instead of the default 1-based numbering.')
self.argparser.add_argument('-V', '--version', action='version', version='%(prog)s 1.0.2',
- help='Display version information and exit.')
+ help='Display version information and exit.')
def _open_input_file(self, path):
"""
@@ -284,21 +287,22 @@ class CSVKitUtility(object):
def get_column_types(self):
if getattr(self.args, 'blanks', None):
- text_type = agate.Text(cast_nulls=False)
+ type_kwargs = {'null_values': ()}
else:
- text_type = agate.Text()
+ type_kwargs = {}
- if self.args.no_inference:
- return agate.TypeTester(types=[text_type])
- else:
- return agate.TypeTester(types=[
- agate.Boolean(),
- agate.Number(locale=self.args.locale),
- agate.TimeDelta(),
- agate.Date(date_format=self.args.date_format),
- agate.DateTime(datetime_format=self.args.datetime_format),
- text_type
- ])
+ types = [agate.Text(**type_kwargs)]
+
+ if not self.args.no_inference:
+ types = [
+ agate.Boolean(**type_kwargs),
+ agate.Number(locale=self.args.locale, **type_kwargs),
+ agate.TimeDelta(**type_kwargs),
+ agate.Date(date_format=self.args.date_format, **type_kwargs),
+ agate.DateTime(datetime_format=self.args.datetime_format, **type_kwargs),
+ ] + types
+
+ return agate.TypeTester(types=types)
def get_column_offset(self):
if self.args.zero_based:
diff --git a/csvkit/utilities/csvcut.py b/csvkit/utilities/csvcut.py
index ed3dc84..f4460d2 100644
--- a/csvkit/utilities/csvcut.py
+++ b/csvkit/utilities/csvcut.py
@@ -16,7 +16,7 @@ from csvkit.cli import CSVKitUtility
class CSVCut(CSVKitUtility):
description = 'Filter and truncate CSV files. Like the Unix "cut" command, but for tabular data.'
- override_flags = ['L', 'date-format', 'datetime-format']
+ override_flags = ['L', 'blanks', 'date-format', 'datetime-format']
def add_arguments(self):
self.argparser.add_argument('-n', '--names', dest='names_only', action='store_true',
diff --git a/csvkit/utilities/csvgrep.py b/csvkit/utilities/csvgrep.py
index b2c013e..bec40b1 100644
--- a/csvkit/utilities/csvgrep.py
+++ b/csvkit/utilities/csvgrep.py
@@ -11,7 +11,7 @@ from csvkit.grep import FilteringCSVReader
class CSVGrep(CSVKitUtility):
description = 'Search CSV files. Like the Unix "grep" command, but for tabular data.'
- override_flags = ['L', 'date-format', 'datetime-format']
+ override_flags = ['L', 'blanks', 'date-format', 'datetime-format']
def add_arguments(self):
self.argparser.add_argument('-n', '--names', dest='names_only', action='store_true',
diff --git a/csvkit/utilities/csvsql.py b/csvkit/utilities/csvsql.py
index 77e1757..5f86a02 100644
--- a/csvkit/utilities/csvsql.py
+++ b/csvkit/utilities/csvsql.py
@@ -40,8 +40,6 @@ class CSVSQL(CSVKitUtility):
help='Skip creating a table. Only valid when --insert is specified.')
self.argparser.add_argument('--overwrite', dest='overwrite', action='store_true',
help='Drop the table before creating.')
- self.argparser.add_argument('--blanks', dest='blanks', action='store_true',
- help='Do not coerce empty strings to NULL values.')
self.argparser.add_argument('--db-schema', dest='db_schema',
help='Optional name of database schema to create table(s) in.')
self.argparser.add_argument('-y', '--snifflimit', dest='sniff_limit', type=int,
diff --git a/csvkit/utilities/csvstack.py b/csvkit/utilities/csvstack.py
index f32f514..56e2051 100644
--- a/csvkit/utilities/csvstack.py
+++ b/csvkit/utilities/csvstack.py
@@ -9,7 +9,8 @@ from csvkit.cli import CSVKitUtility, make_default_headers
class CSVStack(CSVKitUtility):
description = 'Stack up the rows from multiple CSV files, optionally adding a grouping value.'
- override_flags = ['f', 'L', 'date-format', 'datetime-format']
+ # Override 'f' because the utility accepts multiple files.
+ override_flags = ['f', 'L', 'blanks', 'date-format', 'datetime-format']
def add_arguments(self):
self.argparser.add_argument(metavar="FILE", nargs='+', dest='input_paths', default=['-'],
diff --git a/csvkit/utilities/sql2csv.py b/csvkit/utilities/sql2csv.py
index 4e3d24e..9533bb2 100644
--- a/csvkit/utilities/sql2csv.py
+++ b/csvkit/utilities/sql2csv.py
@@ -8,7 +8,8 @@ from csvkit.cli import CSVKitUtility
class SQL2CSV(CSVKitUtility):
description = 'Execute an SQL query on a database and output the result to a CSV file.'
- override_flags = 'f,b,d,e,H,K,L,p,q,S,t,u,z,date-format,datetime-format,zero'.split(',')
+ # Overrides all flags except --linenumbers, --verbose, --version.
+ override_flags = 'f,b,d,e,H,K,L,p,q,S,t,u,z,blanks,date-format,datetime-format,zero'.split(',')
def add_arguments(self):
self.argparser.add_argument('--db', dest='connection_string', default='sqlite://',
diff --git a/docs/common_arguments.rst b/docs/common_arguments.rst
index 79edf46..1290980 100644
--- a/docs/common_arguments.rst
+++ b/docs/common_arguments.rst
@@ -29,6 +29,8 @@ All tools which accept CSV as input share a set of common command-line arguments
Specify the locale (en_US) of any formatted numbers.
-S, --skipinitialspace
Ignore whitespace immediately following the delimiter.
+ --blanks Do not coerce empty, "na", "n/a", "none", "null", "."
+ strings to NULL values.
--date-format DATE_FORMAT
Specify a strptime date format string like "%m/%d/%Y".
--datetime-format DATETIME_FORMAT
diff --git a/docs/scripts/csvsql.rst b/docs/scripts/csvsql.rst
index ed62c05..b7154f6 100644
--- a/docs/scripts/csvsql.rst
+++ b/docs/scripts/csvsql.rst
@@ -47,7 +47,6 @@ Generate SQL statements for a CSV file or execute those statements directly on a
--no-create Skip creating a table. Only valid when --insert is
specified.
--overwrite Drop the table before creating.
- --blanks Do not coerce empty strings to NULL values.
--db-schema DB_SCHEMA
Optional name of database schema to create table(s)
in.
diff --git a/examples/blanks.csv b/examples/blanks.csv
new file mode 100644
index 0000000..e801118
--- /dev/null
+++ b/examples/blanks.csv
@@ -0,0 +1,2 @@
+a,b,c,d,e,f
+,NA,N/A,NONE,NULL,.
diff --git a/examples/blanks_converted.csv b/examples/blanks_converted.csv
new file mode 100644
index 0000000..4c70fa1
--- /dev/null
+++ b/examples/blanks_converted.csv
@@ -0,0 +1,2 @@
+a,b,c,d,e,f
+,,,,,
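To make the effect of the `--blanks` handling above easier to see in isolation, here is a small sketch that mirrors the `get_column_types()` change using agate directly. The reduced type list and the sample row are illustrative only, not csvkit's actual code path:
```
import agate

def column_types(keep_blanks):
    # Same idea as the patched get_column_types(): an empty null_values
    # tuple disables agate's default null markers ("", "na", "n/a", ...).
    type_kwargs = {'null_values': ()} if keep_blanks else {}
    return agate.TypeTester(types=[
        agate.Boolean(**type_kwargs),
        agate.Number(**type_kwargs),
        agate.Text(**type_kwargs),
    ])

rows = [['Namibia', 'NA']]
columns = ['name', 'ISO3166-1-Alpha-2']

# With the default null markers, 'NA' is cast to None and the column is
# inferred as Boolean -- the behaviour reported in the issue below.
default = agate.Table(rows, columns, column_types=column_types(False))

# With the markers disabled, the value survives as the text 'NA'.
kept = agate.Table(rows, columns, column_types=column_types(True))

print(default.rows[0][1], kept.rows[0][1])  # expected: None NA
```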
| Alpha2 ISO country code for Namibia (NA) is silently discarded
This is quite unexpected. Probably related to #49.
Minimal example:
### alpha2.csv
```
name,ISO3166-1-Alpha-2
Namibia,NA
```
### csvstat: `Most common values: None`
```
$ csvstat alpha2.csv
1. "name"
Type of data: Text
Contains null values: False
Unique values: 1
Most common values: Namibia (1x)
2. "ISO3166-1-Alpha-2"
Type of data: Boolean
Contains null values: True (excluded from calculations)
Unique values: 1
Most common values: None (1x)
Row count: 1
```
### csvjoin: `"NA"` is silently replaced with `""`
```
$ csvjoin -c1,1 alpha2.csv alpha2.csv
name,ISO3166-1-Alpha-2,ISO3166-1-Alpha-22
Namibia,,
``` | wireservice/csvkit | diff --git a/tests/test_utilities/test_csvjoin.py b/tests/test_utilities/test_csvjoin.py
index 6d287b1..b0f29e7 100644
--- a/tests/test_utilities/test_csvjoin.py
+++ b/tests/test_utilities/test_csvjoin.py
@@ -44,6 +44,18 @@ class TestCSVJoin(CSVKitTestCase, EmptyFileTests):
with open('examples/join_short.csv') as f:
self.assertEqual(output.readlines(), f.readlines())
+ def test_no_blanks(self):
+ self.assertRows(['examples/blanks.csv', 'examples/blanks.csv'], [
+ ['a', 'b', 'c', 'd', 'e', 'f', 'a2', 'b2', 'c2', 'd2', 'e2', 'f2'],
+ ['', '', '', '', '', '', '', '', '', '', '', ''],
+ ])
+
+ def test_blanks(self):
+ self.assertRows(['--blanks', 'examples/blanks.csv', 'examples/blanks.csv'], [
+ ['a', 'b', 'c', 'd', 'e', 'f', 'a2', 'b2', 'c2', 'd2', 'e2', 'f2'],
+ ['', 'NA', 'N/A', 'NONE', 'NULL', '.', '', 'NA', 'N/A', 'NONE', 'NULL', '.'],
+ ])
+
def test_no_header_row(self):
output = self.get_output_as_io(['-c', '1', '--no-header-row', 'examples/join_a.csv', 'examples/join_no_header_row.csv'])
self.assertEqual(len(output.readlines()), 3)
diff --git a/tests/test_utilities/test_csvjson.py b/tests/test_utilities/test_csvjson.py
index 54299c5..25f37e4 100644
--- a/tests/test_utilities/test_csvjson.py
+++ b/tests/test_utilities/test_csvjson.py
@@ -30,6 +30,14 @@ class TestCSVJSON(CSVKitTestCase, EmptyFileTests):
js = json.loads(self.get_output(['examples/sniff_limit.csv']))
self.assertDictEqual(js[0], {'a': True, 'c': 3.0, 'b': 2.0})
+ def test_no_blanks(self):
+ js = json.loads(self.get_output(['examples/blanks.csv']))
+ self.assertDictEqual(js[0], {'a': None, 'b': None, 'c': None, 'd': None, 'e': None, 'f': None})
+
+ def test_blanks(self):
+ js = json.loads(self.get_output(['--blanks', 'examples/blanks.csv']))
+ self.assertDictEqual(js[0], {'a': '', 'b': 'NA', 'c': 'N/A', 'd': 'NONE', 'e': 'NULL', 'f': '.'})
+
def test_no_header_row(self):
js = json.loads(self.get_output(['--no-header-row', 'examples/no_header_row.csv']))
self.assertDictEqual(js[0], {'a': True, 'c': 3.0, 'b': 2.0})
diff --git a/tests/test_utilities/test_csvlook.py b/tests/test_utilities/test_csvlook.py
index e2b22fe..f7392f9 100644
--- a/tests/test_utilities/test_csvlook.py
+++ b/tests/test_utilities/test_csvlook.py
@@ -35,6 +35,20 @@ class TestCSVLook(CSVKitTestCase, EmptyFileTests):
'| True | 4 | 5 |',
])
+ def test_no_blanks(self):
+ self.assertLines(['examples/blanks.csv'], [
+ '| a | b | c | d | e | f |',
+ '| - | - | - | - | - | - |',
+ '| | | | | | |',
+ ])
+
+ def test_blanks(self):
+ self.assertLines(['--blanks', 'examples/blanks.csv'], [
+ '| a | b | c | d | e | f |',
+ '| - | -- | --- | ---- | ---- | - |',
+ '| | NA | N/A | NONE | NULL | . |',
+ ])
+
def test_no_header_row(self):
self.assertLines(['--no-header-row', 'examples/no_header_row3.csv'], [
'| a | b | c |',
diff --git a/tests/test_utilities/test_csvsort.py b/tests/test_utilities/test_csvsort.py
index 5e3a5b1..4b67a1a 100644
--- a/tests/test_utilities/test_csvsort.py
+++ b/tests/test_utilities/test_csvsort.py
@@ -39,10 +39,25 @@ class TestCSVSort(CSVKitTestCase, ColumnsTests, EmptyFileTests, NamesTests):
new_order = [six.text_type(r[0]) for r in reader]
self.assertEqual(test_order, new_order)
+ def test_no_blanks(self):
+ pass
+
+ def test_blanks(self):
+ reader = self.get_output_as_reader(['examples/blanks.csv'])
+ test_order = [
+ ['a', 'b', 'c', 'd', 'e', 'f'],
+ ['', '', '', '', '', ''],
+ ]
+ new_order = [r for r in reader]
+ self.assertEqual(test_order, new_order)
+
def test_no_header_row(self):
- reader = self.get_output_as_reader(['--no-header-row', '-c', '1', '-r', 'examples/no_header_row3.csv'])
- test_order = ['a', '4', '1']
- new_order = [six.text_type(r[0]) for r in reader]
+ reader = self.get_output_as_reader(['--blanks', 'examples/blanks.csv'])
+ test_order = [
+ ['a', 'b', 'c', 'd', 'e', 'f'],
+ ['', 'NA', 'N/A', 'NONE', 'NULL', '.'],
+ ]
+ new_order = [r for r in reader]
self.assertEqual(test_order, new_order)
def test_no_inference(self):
diff --git a/tests/test_utilities/test_csvsql.py b/tests/test_utilities/test_csvsql.py
index 4959a4b..aa7d7ad 100644
--- a/tests/test_utilities/test_csvsql.py
+++ b/tests/test_utilities/test_csvsql.py
@@ -44,6 +44,28 @@ class TestCSVSQL(CSVKitTestCase, EmptyFileTests):
self.assertTrue('time TIMESTAMP' in sql)
self.assertTrue('datetime TIMESTAMP' in sql)
+ def test_no_blanks(self):
+ sql = self.get_output(['--tables', 'foo', 'examples/blanks.csv'])
+
+ self.assertTrue('CREATE TABLE foo' in sql)
+ self.assertTrue('a BOOLEAN' in sql)
+ self.assertTrue('b BOOLEAN' in sql)
+ self.assertTrue('c BOOLEAN' in sql)
+ self.assertTrue('d BOOLEAN' in sql)
+ self.assertTrue('e BOOLEAN' in sql)
+ self.assertTrue('f BOOLEAN' in sql)
+
+ def test_blanks(self):
+ sql = self.get_output(['--tables', 'foo', '--blanks', 'examples/blanks.csv'])
+
+ self.assertTrue('CREATE TABLE foo' in sql)
+ self.assertTrue('a VARCHAR NOT NULL' in sql)
+ self.assertTrue('b VARCHAR(2) NOT NULL' in sql)
+ self.assertTrue('c VARCHAR(3) NOT NULL' in sql)
+ self.assertTrue('d VARCHAR(4) NOT NULL' in sql)
+ self.assertTrue('e VARCHAR(4) NOT NULL' in sql)
+ self.assertTrue('f VARCHAR(1) NOT NULL' in sql)
+
def test_no_inference(self):
sql = self.get_output(['--tables', 'foo', '--no-inference', 'examples/testfixed_converted.csv'])
diff --git a/tests/test_utilities/test_in2csv.py b/tests/test_utilities/test_in2csv.py
index 943d269..f9f454f 100644
--- a/tests/test_utilities/test_in2csv.py
+++ b/tests/test_utilities/test_in2csv.py
@@ -38,6 +38,12 @@ class TestIn2CSV(CSVKitTestCase, EmptyFileTests):
def test_locale(self):
self.assertConverted('csv', 'examples/test_locale.csv', 'examples/test_locale_converted.csv', ['--locale', 'de_DE'])
+ def test_no_blanks(self):
+ self.assertConverted('csv', 'examples/blanks.csv', 'examples/blanks_converted.csv')
+
+ def test_blanks(self):
+ self.assertConverted('csv', 'examples/blanks.csv', 'examples/blanks.csv', ['--blanks'])
+
def test_date_format(self):
self.assertConverted('csv', 'examples/test_date_format.csv', 'examples/test_date_format_converted.csv', ['--date-format', '%d/%m/%Y'])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 9
} | 1.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"agate-excel",
"agate-sql"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | agate==1.13.0
agate-dbf==0.2.3
agate-excel==0.4.1
agate-sql==0.7.2
babel==2.17.0
-e git+https://github.com/wireservice/csvkit.git@53f4a2df4992e04796e411c6a23e005a92205c2d#egg=csvkit
dbfread==2.0.7
et_xmlfile==2.0.0
exceptiongroup==1.2.2
greenlet==3.1.1
iniconfig==2.1.0
isodate==0.7.2
leather==0.4.0
olefile==0.47
openpyxl==3.1.5
packaging==24.2
parsedatetime==2.6
pluggy==1.5.0
pytest==8.3.5
python-slugify==8.0.4
pytimeparse==1.1.8
six==1.17.0
SQLAlchemy==2.0.40
text-unidecode==1.3
tomli==2.2.1
typing_extensions==4.13.0
xlrd==2.0.1
| name: csvkit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- agate==1.13.0
- agate-dbf==0.2.3
- agate-excel==0.4.1
- agate-sql==0.7.2
- babel==2.17.0
- dbfread==2.0.7
- et-xmlfile==2.0.0
- exceptiongroup==1.2.2
- greenlet==3.1.1
- iniconfig==2.1.0
- isodate==0.7.2
- leather==0.4.0
- olefile==0.47
- openpyxl==3.1.5
- packaging==24.2
- parsedatetime==2.6
- pluggy==1.5.0
- pytest==8.3.5
- python-slugify==8.0.4
- pytimeparse==1.1.8
- six==1.17.0
- sqlalchemy==2.0.40
- text-unidecode==1.3
- tomli==2.2.1
- typing-extensions==4.13.0
- xlrd==2.0.1
prefix: /opt/conda/envs/csvkit
| [
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_blanks",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_no_blanks",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_blanks",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_no_blanks",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_blanks",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_no_blanks",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_blanks",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_no_header_row",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_no_blanks",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_blanks",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_no_blanks"
]
| [
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_keying",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_blanks",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_create_table",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_empty_with_query",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_no_inference",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_query",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_query_file",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_query_text",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_query_with_prefix",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_dbf"
]
| [
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_empty",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_inner",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_launch_new_instance",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_left",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_left_short_columns",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_no_header_row",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_no_inference",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_outer",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_right",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_sequential",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_sniff_limit_no_limit",
"tests/test_utilities/test_csvjoin.py::TestCSVJoin::test_sniff_limit_zero_limit",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_duplicate_keys",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_empty",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_geojson",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_geojson_with_crs",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_geojson_with_id",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_indentation",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_launch_new_instance",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_ndjson",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_ndjson_with_no_inference",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_no_header_row",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_no_inference",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_simple",
"tests/test_utilities/test_csvjson.py::TestCSVJSON::test_sniff_limit",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_empty",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_encoding",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_launch_new_instance",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_linenumbers",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_max_column_width",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_max_columns",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_max_rows",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_no_header_row",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_no_inference",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_runs",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_simple",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_sniff_limit_no_limit",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_sniff_limit_zero_limit",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_stdin",
"tests/test_utilities/test_csvlook.py::TestCSVLook::test_unicode",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_empty",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_encoding",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_invalid_column",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_invalid_options",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_launch_new_instance",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_names",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_no_blanks",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_no_inference",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_runs",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_sort_date",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_sort_string_reverse",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_sort_t_and_nulls",
"tests/test_utilities/test_csvsort.py::TestCSVSort::test_stdin",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_empty",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_launch_new_instance",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_linenumbers",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_no_header_row",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_stdin",
"tests/test_utilities/test_csvsql.py::TestCSVSQL::test_stdin_and_filename",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_csv",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_csv_with_skip_lines",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_geojson",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_json",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_ndjson",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_nested_json",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xls",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xls_with_sheet",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xls_with_skip_lines",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xls_with_unicode_sheet",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xls_with_write_sheets",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xlsx",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xlsx_with_sheet",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xlsx_with_skip_lines",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xlsx_with_unicode_sheet",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_convert_xlsx_with_write_sheets",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_csv_datetime_inference",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_csv_no_headers",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_csv_no_inference",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_date_format",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_empty",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_geojson_no_inference",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_json_no_inference",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_launch_new_instance",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_locale",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_names_xls",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_names_xlsx",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_ndjson_no_inference",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_version",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_xls_no_inference",
"tests/test_utilities/test_in2csv.py::TestIn2CSV::test_xlsx_no_inference"
]
| []
| MIT License | 1,096 | [
"examples/blanks_converted.csv",
"csvkit/utilities/csvgrep.py",
"csvkit/utilities/csvsql.py",
"csvkit/utilities/sql2csv.py",
"docs/common_arguments.rst",
"CHANGELOG.rst",
"csvkit/cli.py",
"csvkit/utilities/csvstack.py",
"csvkit/utilities/csvcut.py",
"examples/blanks.csv",
"docs/scripts/csvsql.rst"
]
| [
"examples/blanks_converted.csv",
"csvkit/utilities/csvgrep.py",
"csvkit/utilities/csvsql.py",
"csvkit/utilities/sql2csv.py",
"docs/common_arguments.rst",
"CHANGELOG.rst",
"csvkit/cli.py",
"csvkit/utilities/csvstack.py",
"csvkit/utilities/csvcut.py",
"examples/blanks.csv",
"docs/scripts/csvsql.rst"
]
|
|
zhmcclient__python-zhmcclient-217 | c52016859aae083f10ef7be7ebef87b60a54554c | 2017-03-16 17:50:02 | 63bfc356570b865f4eac1d6a37c62e7b018520fc | diff --git a/docs/changes.rst b/docs/changes.rst
index 3072f69..7b7c868 100644
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -31,6 +31,8 @@ Released: not yet
**Bug fixes:**
+* Added WWPN support in mocking framework (issue #212).
+
**Enhancements:**
**Known Issues:**
diff --git a/zhmcclient_mock/_hmc.py b/zhmcclient_mock/_hmc.py
index 961ee67..450855e 100644
--- a/zhmcclient_mock/_hmc.py
+++ b/zhmcclient_mock/_hmc.py
@@ -774,6 +774,9 @@ class FakedHbaManager(FakedBaseManager):
if 'device-number' not in new_hba.properties:
devno = partition.devno_alloc()
new_hba.properties['device-number'] = devno
+ if 'wwpn' not in new_hba.properties:
+ wwpn = partition.wwpn_alloc()
+ new_hba.properties['wwpn'] = wwpn
return new_hba
def remove(self, oid):
@@ -793,6 +796,9 @@ class FakedHbaManager(FakedBaseManager):
devno = hba.properties.get('device-number', None)
if devno:
partition.devno_free_if_allocated(devno)
+ wwpn = hba.properties.get('wwpn', None)
+ if wwpn:
+ partition.wwpn_free_if_allocated(wwpn)
assert 'hba-uris' in partition.properties
hba_uris = partition.properties['hba-uris']
hba_uris.remove(hba.uri)
@@ -1049,6 +1055,7 @@ class FakedPartition(FakedBaseResource):
self._virtual_functions = FakedVirtualFunctionManager(
hmc=manager.hmc, partition=self)
self._devno_pool = IdPool(0x8000, 0xFFFF)
+ self._wwpn_pool = IdPool(0x8000, 0xFFFF)
@property
def nics(self):
@@ -1118,6 +1125,51 @@ class FakedPartition(FakedBaseResource):
devno_int = int(devno, 16)
self._devno_pool.free_if_allocated(devno_int)
+ def wwpn_alloc(self):
+ """
+ Allocates a WWPN unique to this partition, in the range of
+ 0xAFFEAFFE00008000 to 0xAFFEAFFE0000FFFF.
+
+ Returns:
+ string: The WWPN as 16 hexadecimal digits in upper case.
+
+ Raises:
+ ValueError: No more WWPNs available in that range.
+ """
+ wwpn_int = self._wwpn_pool.alloc()
+ wwpn = "AFFEAFFE0000" + "{:04X}".format(wwpn_int)
+ return wwpn
+
+ def wwpn_free(self, wwpn):
+ """
+ Free a WWPN allocated with :meth:`wwpn_alloc`.
+
+ The WWPN must be allocated.
+
+ Parameters:
+ WWPN (string): The WWPN as 16 hexadecimal digits.
+
+ Raises:
+ ValueError: WWPN not in pool range or not currently
+ allocated.
+ """
+ wwpn_int = int(wwpn[-4:], 16)
+ self._wwpn_pool.free(wwpn_int)
+
+ def wwpn_free_if_allocated(self, wwpn):
+
+ """
+ Free a WWPN allocated with :meth:`wwpn_alloc`.
+
+ If the WWPN is not currently allocated or not in the pool
+ range, nothing happens.
+
+ Parameters:
+ WWPN (string): The WWPN as 16 hexadecimal digits.
+ """
+ wwpn_int = int(wwpn[-4:], 16)
+ self._wwpn_pool.free_if_allocated(wwpn_int)
+
class FakedPortManager(FakedBaseManager):
"""
 | WWPN is not attaching in mock framework
In the mock framework, when we create an HBA, the 'wwpn' property should also be attached automatically, but as of now it is not:
self.partition.hbas[0].get_property('wwpn')
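For reference, here is a minimal sketch of the WWPN allocation scheme that the fix above introduces in `FakedPartition.wwpn_alloc()`. The `IdPool` class below is a simplified stand-in written for this illustration; only the prefix and the hex formatting follow the patch:
```
class IdPool(object):
    """Simplified stand-in for an ID pool: hands out free integers from a
    fixed range and raises when the range is exhausted."""

    def __init__(self, lowest, highest):
        self._free = set(range(lowest, highest + 1))

    def alloc(self):
        if not self._free:
            raise ValueError("No more IDs available in pool")
        return self._free.pop()

    def free(self, id_):
        self._free.add(id_)

# WWPN generation as in FakedPartition.wwpn_alloc(): a fixed 12-digit
# prefix plus the pool value as four upper-case hex digits.
pool = IdPool(0x8000, 0xFFFF)
wwpn = "AFFEAFFE0000" + "{:04X}".format(pool.alloc())
print(wwpn)  # e.g. AFFEAFFE00008000
```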
| zhmcclient/python-zhmcclient | diff --git a/tests/unit/zhmcclient_mock/test_urihandler.py b/tests/unit/zhmcclient_mock/test_urihandler.py
index eecbb13..6febc10 100755
--- a/tests/unit/zhmcclient_mock/test_urihandler.py
+++ b/tests/unit/zhmcclient_mock/test_urihandler.py
@@ -453,7 +453,7 @@ def standard_test_hmc():
'description': 'HBA #1 in Partition #1',
'adapter-port-uri':
'/api/adapters/2/storage-ports/1',
- 'wwpn': 'wwpn_1',
+ 'wwpn': 'CFFEAFFE00008001',
'device-number': '1001',
},
},
@@ -782,7 +782,7 @@ class CpcExportPortNamesListHandlerTests(unittest.TestCase):
]
}
exp_wwpn_list = [
- 'partition_1,2,1001,wwpn_1',
+ 'partition_1,2,1001,CFFEAFFE00008001',
]
# the function to be tested:
@@ -1108,7 +1108,7 @@ class HbaHandlerTests(unittest.TestCase):
'name': 'hba_1',
'description': 'HBA #1 in Partition #1',
'adapter-port-uri': '/api/adapters/2/storage-ports/1',
- 'wwpn': 'wwpn_1',
+ 'wwpn': 'CFFEAFFE00008001',
'device-number': '1001',
}
self.assertEqual(hba1, exp_hba1)
@@ -1138,6 +1138,7 @@ class HbaHandlerTests(unittest.TestCase):
'name': 'hba_2',
'adapter-port-uri': '/api/adapters/2/storage-ports/1',
'device-number': hba2['device-number'], # auto-generated
+ 'wwpn': hba2['wwpn'], # auto-generated
}
self.assertEqual(hba2, exp_hba2)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock",
"pytest-xdist",
"pytest-asyncio",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
click-repl==0.3.0
click-spinner==0.1.10
coverage==7.8.0
decorator==5.2.1
docopt==0.6.2
exceptiongroup==1.2.2
execnet==2.1.1
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
progressbar2==4.5.0
prompt_toolkit==3.0.50
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
python-utils==3.9.1
requests==2.32.3
six==1.17.0
stomp.py==8.2.0
tabulate==0.9.0
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
websocket-client==1.8.0
-e git+https://github.com/zhmcclient/python-zhmcclient.git@c52016859aae083f10ef7be7ebef87b60a54554c#egg=zhmcclient
| name: python-zhmcclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- click-repl==0.3.0
- click-spinner==0.1.10
- coverage==7.8.0
- decorator==5.2.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- progressbar2==4.5.0
- prompt-toolkit==3.0.50
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- python-utils==3.9.1
- requests==2.32.3
- six==1.17.0
- stomp-py==8.2.0
- tabulate==0.9.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- websocket-client==1.8.0
prefix: /opt/conda/envs/python-zhmcclient
| [
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_create_verify"
]
| []
| [
"tests/unit/zhmcclient_mock/test_urihandler.py::HTTPErrorTests::test_attributes",
"tests/unit/zhmcclient_mock/test_urihandler.py::HTTPErrorTests::test_response",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidResourceErrorTests::test_attributes_no_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidResourceErrorTests::test_attributes_with_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidMethodErrorTests::test_attributes_no_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidMethodErrorTests::test_attributes_with_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcNotInDpmErrorTests::test_attributes",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcInDpmErrorTests::test_attributes",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerEmptyTests::test_uris_empty_1",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerEmptyTests::test_uris_empty_2",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_begin_extra",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_begin_missing",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end2_extra",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end2_missing",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end2_slash",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end_extra",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end_missing",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end_slash",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_ok1",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_ok2",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_ok3",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_delete_cpc2",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_get_cpc1",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_get_cpcs",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_post_cpcs",
"tests/unit/zhmcclient_mock/test_urihandler.py::GenericGetPropertiesHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::GenericUpdatePropertiesHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VersionHandlerTests::test_get_version",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcStartStopHandlerTests::test_start_classic",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcStartStopHandlerTests::test_stop_classic",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcStartStopHandlerTests::test_stop_start_dpm",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcExportPortNamesListHandlerTests::test_invoke_err_no_input",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcExportPortNamesListHandlerTests::test_invoke_ok",
"tests/unit/zhmcclient_mock/test_urihandler.py::AdapterHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::AdapterHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::AdapterHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NetworkPortHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::NetworkPortHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::StoragePortHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::StoragePortHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_create_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionStartStopHandlerTests::test_start_stop",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_create_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_create_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualSwitchHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualSwitchHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::LparHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::LparHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::LparActLoadDeactHandlerTests::test_start_stop",
"tests/unit/zhmcclient_mock/test_urihandler.py::ResetActProfileHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::ResetActProfileHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::ImageActProfileHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::ImageActProfileHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::LoadActProfileHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::LoadActProfileHandlersTests::test_list"
]
| []
| Apache License 2.0 | 1,097 | [
"docs/changes.rst",
"zhmcclient_mock/_hmc.py"
]
| [
"docs/changes.rst",
"zhmcclient_mock/_hmc.py"
]
|
|
Telefonica__toolium-70 | e35c53831bd3a61c40cd3e13dd972783c24ff664 | 2017-03-17 09:19:45 | e35c53831bd3a61c40cd3e13dd972783c24ff664 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 355d22e..8ecab67 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,13 @@
Toolium Changelog
=================
+v1.2.4
+------
+
+*Release date: 2017-03-17*
+
+- Fix NoSuchElementException error finding elements in nested groups
+
v1.2.3
------
diff --git a/VERSION b/VERSION
index 0495c4a..e8ea05d 100644
--- a/VERSION
+++ b/VERSION
@@ -1,1 +1,1 @@
-1.2.3
+1.2.4
diff --git a/toolium/pageelements/group_page_element.py b/toolium/pageelements/group_page_element.py
index ae5c376..8f0c0d2 100644
--- a/toolium/pageelements/group_page_element.py
+++ b/toolium/pageelements/group_page_element.py
@@ -50,11 +50,12 @@ class Group(PageObject, PageElement):
:param driver_wrapper: driver wrapper instance
"""
+ from toolium.pageelements.page_elements import PageElements
if driver_wrapper:
self.driver_wrapper = driver_wrapper
self._web_element = None
for element in self._get_page_elements():
element.reset_object(driver_wrapper)
- if not isinstance(element, PageObject):
+ if isinstance(element, (PageElement, PageElements)):
# If element is not a page object, update element parent
element.parent = self
diff --git a/toolium/pageobjects/page_object.py b/toolium/pageobjects/page_object.py
index d367d0c..84d4a4f 100644
--- a/toolium/pageobjects/page_object.py
+++ b/toolium/pageobjects/page_object.py
@@ -60,10 +60,8 @@ class PageObject(CommonObject):
:returns: list of page elements and page objects
"""
- from toolium.pageelements.page_element import PageElement
- from toolium.pageelements.page_elements import PageElements
page_elements = []
for attribute, value in list(self.__dict__.items()) + list(self.__class__.__dict__.items()):
- if attribute != 'parent' and isinstance(value, (PageElement, PageElements, PageObject)):
+ if attribute != 'parent' and isinstance(value, CommonObject):
page_elements.append(value)
return page_elements
| Elements not found in nested groups
When you have several nested groups, the parent parameter is not filled correctly, so sometimes the elements of a group cannot be found and a "NoSuchElementException" is raised.
Defined groups example:
```
class ThirdGroup(Group):
    def init_page_elements(self):
        self.element3 = Button(By.XPATH, './/button[@class="three"]')

class SecondGroup(Group):
    def init_page_elements(self):
        self.element2 = ThirdGroup(By.XPATH, './/button[@class="two"]')
        self.test_element = Button(By.XPATH, './/button[@class="two"]//button[@class="three"]')

class FirstGroup(Group):
    def init_page_elements(self):
        self.element1 = SecondGroup(By.XPATH, './/button[@class="first"]')

class ThisPageObject(PageObject):
    def init_page_elements(self):
        self.my_first_group = FirstGroup(By.XPATH, '//div')
```
Elements accessing example:
```
visible1 = self.my_first_group.element1.element2.element3.web_element.is_displayed()
visible2 = self.my_first_group.element1.test_element.web_element.is_displayed()
```
That first line (visible1) gives a NoSuchElementException, but the last line (visible2) finds the element and returns the correct result. The element accessed in both cases has the same XPATH, but in the first case there is one more nested group than in the second one.
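The mechanics behind this can be sketched with a toy model; none of the classes below are toolium's real ones, they only illustrate how a parent chain turns nested `.//` locators into lookups scoped to the enclosing group's web element:
```
class FakeScope(object):
    """Stand-in for a WebDriver or WebElement: both expose find_element."""

    def __init__(self, name):
        self.name = name

    def find_element(self, xpath):
        # A real driver would search the DOM; here we only record the scope
        # that the relative locator is evaluated against.
        print('searching %r inside %s' % (xpath, self.name))
        return FakeScope('%s -> %s' % (self.name, xpath))


class FakeElement(object):
    def __init__(self, xpath, parent=None):
        self.xpath = xpath
        # After the fix, Group.reset_object() points each child's parent at
        # the enclosing group; here it is passed in explicitly.
        self.parent = parent

    def web_element(self, driver):
        scope = driver if self.parent is None else self.parent.web_element(driver)
        return scope.find_element(self.xpath)


outer = FakeElement('//div')
inner = FakeElement('.//button[@class="two"]', parent=outer)
leaf = FakeElement('.//button[@class="three"]', parent=inner)
leaf.web_element(FakeScope('driver'))
# If a parent link is missing or points at the wrong object, the relative
# locator is evaluated against the wrong scope and the lookup fails with
# the NoSuchElementException described above.
```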
| Telefonica/toolium | diff --git a/toolium/test/pageelements/test_derived_page_element.py b/toolium/test/pageelements/test_derived_page_element.py
index 57ef942..5701f3e 100644
--- a/toolium/test/pageelements/test_derived_page_element.py
+++ b/toolium/test/pageelements/test_derived_page_element.py
@@ -150,18 +150,18 @@ def test_group_reset_object(driver_wrapper):
# Check that web elements are empty
assert login_page.menu._web_element is None
assert login_page.menu.logo._web_element is None
- assert login_page.menu.logo.parent._web_element is None
+ assert login_page.menu.logo.parent == login_page.menu
login_page.menu.logo.web_element
# Check that web elements are filled
assert login_page.menu._web_element is not None
assert login_page.menu.logo._web_element is not None
- assert login_page.menu.logo.parent._web_element is not None
+ assert login_page.menu.logo.parent == login_page.menu
login_page.menu.reset_object()
# Check that web elements are empty
assert login_page.menu._web_element is None
assert login_page.menu.logo._web_element is None
- assert login_page.menu.logo.parent._web_element is None
+ assert login_page.menu.logo.parent == login_page.menu
diff --git a/toolium/test/pageelements/test_page_elements_groups.py b/toolium/test/pageelements/test_page_elements_groups.py
index 347d67a..bcea69c 100644
--- a/toolium/test/pageelements/test_page_elements_groups.py
+++ b/toolium/test/pageelements/test_page_elements_groups.py
@@ -108,6 +108,15 @@ def test_reset_object_page_elements_groups(driver_wrapper):
assert column_21.input._web_element is not None
assert column_21.link._web_element is not None
assert column_21.input_with_parent._web_element is not None
+ # Check that the group elements have the group as parent
+ assert column_11.parent == row_1
+ assert column_21.parent == row_2
+ assert column_11.input.parent == column_11
+ assert column_11.link.parent == column_11
+ assert column_11.input_with_parent.parent == column_11
+ assert column_21.input.parent == column_21
+ assert column_21.link.parent == column_21
+ assert column_21.input_with_parent.parent == column_21
table_page.reset_object()
@@ -128,3 +137,12 @@ def test_reset_object_page_elements_groups(driver_wrapper):
assert column_21.input._web_element is None
assert column_21.link._web_element is None
assert column_21.input_with_parent._web_element is None
+ # Check that the group elements have the group as parent
+ assert column_11.parent == row_1
+ assert column_21.parent == row_2
+ assert column_11.input.parent == column_11
+ assert column_11.link.parent == column_11
+ assert column_11.input_with_parent.parent == column_11
+ assert column_21.input.parent == column_21
+ assert column_21.link.parent == column_21
+ assert column_21.input_with_parent.parent == column_21
diff --git a/toolium/test/pageelements/test_page_nested_groups.py b/toolium/test/pageelements/test_page_nested_groups.py
new file mode 100644
index 0000000..4a4b0cf
--- /dev/null
+++ b/toolium/test/pageelements/test_page_nested_groups.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+u"""
+Copyright 2017 Telefónica Investigación y Desarrollo, S.A.U.
+This file is part of Toolium.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import mock
+import pytest
+from selenium.webdriver.common.by import By
+from selenium.webdriver.remote.webelement import WebElement
+
+from toolium.driver_wrapper import DriverWrapper
+from toolium.driver_wrappers_pool import DriverWrappersPool
+from toolium.pageelements import PageElements, Group, InputText, Link
+from toolium.pageobjects.page_object import PageObject
+
+
+class InnerGroup(Group):
+ def init_page_elements(self):
+ self.input = InputText(By.XPATH, './/input')
+ self.input_with_parent = InputText(By.XPATH, './/input', (By.XPATH, './/parent'))
+
+
+class OuterGroup(Group):
+ def init_page_elements(self):
+ self.inner = InnerGroup(By.XPATH, './/div')
+
+
+class NestedPageObject(PageObject):
+ def init_page_elements(self):
+ self.outer = OuterGroup(By.XPATH, '//div')
+
+
[email protected]
+def driver_wrapper():
+ # Reset wrappers pool values
+ DriverWrappersPool._empty_pool()
+ DriverWrapper.config_properties_filenames = None
+
+ # Create a new wrapper
+ driver_wrapper = DriverWrappersPool.get_default_wrapper()
+ driver_wrapper.driver = mock.MagicMock()
+
+ return driver_wrapper
+
+
+def test_reset_object_nested_groups(driver_wrapper):
+ # Mock Driver.save_web_element = True
+ driver_wrapper.config = mock.MagicMock()
+ driver_wrapper.config.getboolean_optional.return_value = True
+ # Create mock element
+ mock_element = mock.MagicMock(spec=WebElement)
+ driver_wrapper.driver.find_element.return_value = mock_element
+
+ nested_page = NestedPageObject()
+
+ # Check that web elements are empty
+ assert nested_page.outer._web_element is None
+ assert nested_page.outer.inner._web_element is None
+ assert nested_page.outer.inner.input._web_element is None
+ assert nested_page.outer.inner.input_with_parent._web_element is None
+ assert nested_page.outer.inner.parent == nested_page.outer
+ assert nested_page.outer.inner.input.parent == nested_page.outer.inner
+ assert nested_page.outer.inner.input_with_parent.parent == nested_page.outer.inner
+
+ nested_page.outer.inner.input.web_element
+ nested_page.outer.inner.input_with_parent.web_element
+
+ # Check that web elements are filled
+ assert nested_page.outer._web_element is not None
+ assert nested_page.outer.inner._web_element is not None
+ assert nested_page.outer.inner.input._web_element is not None
+ assert nested_page.outer.inner.input_with_parent._web_element is not None
+ assert nested_page.outer.inner.parent == nested_page.outer
+ assert nested_page.outer.inner.input.parent == nested_page.outer.inner
+ assert nested_page.outer.inner.input_with_parent.parent == nested_page.outer.inner
+
+ nested_page.outer.reset_object()
+
+ # Check that web elements are empty
+ assert nested_page.outer._web_element is None
+ assert nested_page.outer.inner._web_element is None
+ assert nested_page.outer.inner.input._web_element is None
+ assert nested_page.outer.inner.input_with_parent._web_element is None
+ assert nested_page.outer.inner.parent == nested_page.outer
+ assert nested_page.outer.inner.input.parent == nested_page.outer.inner
+ assert nested_page.outer.inner.input_with_parent.parent == nested_page.outer.inner
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 4
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt",
"requirements_dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
Appium-Python-Client==1.3.0
attrs==22.2.0
Babel==2.11.0
behave==1.2.5
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
coveralls==1.1
docopt==0.6.2
docutils==0.18.1
execnet==1.9.0
extras==1.0.0
fixtures==4.0.1
fuzzywuzzy==0.18.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
lettuce==0.2.23
MarkupSafe==2.0.1
mock==2.0.0
needle==0.4.1
nose==1.3.7
packaging==21.3
parse==1.20.2
parse_type==0.6.4
pbr==6.1.1
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
python-subunit==1.4.2
pytz==2025.2
requests==2.27.1
requests-mock==1.2.0
selenium==3.141.0
six==1.17.0
snowballstemmer==2.2.0
Sphinx==1.5.1
sure==2.0.1
testtools==2.6.0
tomli==1.2.3
-e git+https://github.com/Telefonica/toolium.git@e35c53831bd3a61c40cd3e13dd972783c24ff664#egg=toolium
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: toolium
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- appium-python-client==1.3.0
- attrs==22.2.0
- babel==2.11.0
- behave==1.2.5
- charset-normalizer==2.0.12
- coverage==6.2
- coveralls==1.1
- docopt==0.6.2
- docutils==0.18.1
- execnet==1.9.0
- extras==1.0.0
- fixtures==4.0.1
- fuzzywuzzy==0.18.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- lettuce==0.2.23
- markupsafe==2.0.1
- mock==2.0.0
- needle==0.4.1
- nose==1.3.7
- packaging==21.3
- parse==1.20.2
- parse-type==0.6.4
- pbr==6.1.1
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- python-subunit==1.4.2
- pytz==2025.2
- requests==2.27.1
- requests-mock==1.2.0
- selenium==3.141.0
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==1.5.1
- sure==2.0.1
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/toolium
| [
"toolium/test/pageelements/test_page_nested_groups.py::test_reset_object_nested_groups"
]
| []
| [
"toolium/test/pageelements/test_derived_page_element.py::test_locator",
"toolium/test/pageelements/test_derived_page_element.py::test_get_text",
"toolium/test/pageelements/test_derived_page_element.py::test_get_input_text",
"toolium/test/pageelements/test_derived_page_element.py::test_set_input_text",
"toolium/test/pageelements/test_derived_page_element.py::test_get_selected_option",
"toolium/test/pageelements/test_derived_page_element.py::test_set_option",
"toolium/test/pageelements/test_derived_page_element.py::test_click_button",
"toolium/test/pageelements/test_derived_page_element.py::test_group_reset_object",
"toolium/test/pageelements/test_page_elements_groups.py::test_reset_object_page_elements_groups"
]
| []
| Apache License 2.0 | 1,098 | [
"VERSION",
"CHANGELOG.rst",
"toolium/pageobjects/page_object.py",
"toolium/pageelements/group_page_element.py"
]
| [
"VERSION",
"CHANGELOG.rst",
"toolium/pageobjects/page_object.py",
"toolium/pageelements/group_page_element.py"
]
|
|
tox-dev__tox-486 | 5df724f06f6dda3b9b5f3e4e80a7f9dcaa1c823b | 2017-03-18 22:02:14 | 5df724f06f6dda3b9b5f3e4e80a7f9dcaa1c823b | diff --git a/doc/config.txt b/doc/config.txt
index dc13ba82..3b6d66dc 100644
--- a/doc/config.txt
+++ b/doc/config.txt
@@ -212,10 +212,10 @@ Complete list of settings that you can put into ``testenv*`` sections:
You can use ``*`` and ``?`` to match multiple environment variables with
one name.
- Note that the ``PATH``, ``LANG`` and ``PIP_INDEX_URL`` variables are
- unconditionally passed down and on Windows ``SYSTEMROOT``, ``PATHEXT``,
- ``TEMP`` and ``TMP`` will be passed down as well whereas on unix
- ``TMPDIR`` will be passed down. You can override these variables
+ Note that the ``PATH``, ``LANG``, ``LANGUAGE`` and ``PIP_INDEX_URL``
+ variables are unconditionally passed down and on Windows ``SYSTEMROOT``,
+ ``PATHEXT``, ``TEMP`` and ``TMP`` will be passed down as well whereas on
+ unix ``TMPDIR`` will be passed down. You can override these variables
with the ``setenv`` option.
If defined the ``TOX_TESTENV_PASSENV`` environment variable (in the tox
diff --git a/tox/config.py b/tox/config.py
index a02c34ea..a66e5892 100755
--- a/tox/config.py
+++ b/tox/config.py
@@ -454,7 +454,9 @@ def tox_addoption(parser):
itertools.chain.from_iterable(
[x.split(' ') for x in value]))
- passenv = set(["PATH", "PIP_INDEX_URL", "LANG", "LD_LIBRARY_PATH"])
+ passenv = set([
+ "PATH", "PIP_INDEX_URL", "LANG", "LANGUAGE", "LD_LIBRARY_PATH"
+ ])
# read in global passenv settings
p = os.environ.get("TOX_TESTENV_PASSENV", None)
| Add `LANGUAGE` env var to default pass list
Since tox 2.0.2 `LANG` is automatically passed to avoid Unicode(En|De)codeErrors. I've just now encountered a system (macOS) where `LANGUAGE` was used instead of `LANG`, again causing test breakage.
I would suggest to add `LANGUAGE` to the list of default pass variables. | tox-dev/tox | diff --git a/tests/test_config.py b/tests/test_config.py
index a7563751..e1226d43 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -869,6 +869,7 @@ class TestConfigTestEnv:
assert "PATH" in envconfig.passenv
assert "PIP_INDEX_URL" in envconfig.passenv
assert "LANG" in envconfig.passenv
+ assert "LANGUAGE" in envconfig.passenv
assert "LD_LIBRARY_PATH" in envconfig.passenv
assert "A123A" in envconfig.passenv
assert "A123B" in envconfig.passenv
@@ -898,6 +899,7 @@ class TestConfigTestEnv:
assert "PATH" in envconfig.passenv
assert "PIP_INDEX_URL" in envconfig.passenv
assert "LANG" in envconfig.passenv
+ assert "LANGUAGE" in envconfig.passenv
assert "A123A" in envconfig.passenv
assert "A123B" in envconfig.passenv
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 2
} | 2.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest>=2.3.5",
"pytest-timeout",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
distlib==0.3.9
filelock==3.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
platformdirs==2.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-timeout==2.1.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
-e git+https://github.com/tox-dev/tox.git@5df724f06f6dda3b9b5f3e4e80a7f9dcaa1c823b#egg=tox
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
virtualenv==20.17.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: tox
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- distlib==0.3.9
- filelock==3.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- platformdirs==2.4.0
- pytest-timeout==2.1.0
- virtualenv==20.17.1
prefix: /opt/conda/envs/tox
| [
"tests/test_config.py::TestConfigTestEnv::test_passenv_as_multiline_list[win32]",
"tests/test_config.py::TestConfigTestEnv::test_passenv_as_multiline_list[linux2]",
"tests/test_config.py::TestConfigTestEnv::test_passenv_as_space_separated_list[win32]",
"tests/test_config.py::TestConfigTestEnv::test_passenv_as_space_separated_list[linux2]"
]
| [
"tests/test_config.py::TestVenvConfig::test_force_dep_with_url",
"tests/test_config.py::TestIniParser::test_getbool"
]
| [
"tests/test_config.py::TestVenvConfig::test_config_parsing_minimal",
"tests/test_config.py::TestVenvConfig::test_config_parsing_multienv",
"tests/test_config.py::TestVenvConfig::test_envdir_set_manually",
"tests/test_config.py::TestVenvConfig::test_envdir_set_manually_with_substitutions",
"tests/test_config.py::TestVenvConfig::test_force_dep_version",
"tests/test_config.py::TestVenvConfig::test_is_same_dep",
"tests/test_config.py::TestConfigPlatform::test_config_parse_platform",
"tests/test_config.py::TestConfigPlatform::test_config_parse_platform_with_factors[win]",
"tests/test_config.py::TestConfigPlatform::test_config_parse_platform_with_factors[lin]",
"tests/test_config.py::TestConfigPackage::test_defaults",
"tests/test_config.py::TestConfigPackage::test_defaults_distshare",
"tests/test_config.py::TestConfigPackage::test_defaults_changed_dir",
"tests/test_config.py::TestConfigPackage::test_project_paths",
"tests/test_config.py::TestParseconfig::test_search_parents",
"tests/test_config.py::TestParseconfig::test_explicit_config_path",
"tests/test_config.py::test_get_homedir",
"tests/test_config.py::TestGetcontextname::test_blank",
"tests/test_config.py::TestGetcontextname::test_jenkins",
"tests/test_config.py::TestGetcontextname::test_hudson_legacy",
"tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_substitution_from_other_section",
"tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_substitution_from_other_section_multiline",
"tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_substitution_from_other_section_posargs",
"tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_section_and_posargs_substitution",
"tests/test_config.py::TestIniParserAgainstCommandsKey::test_command_env_substitution",
"tests/test_config.py::TestIniParser::test_getstring_single",
"tests/test_config.py::TestIniParser::test_missing_substitution",
"tests/test_config.py::TestIniParser::test_getstring_fallback_sections",
"tests/test_config.py::TestIniParser::test_getstring_substitution",
"tests/test_config.py::TestIniParser::test_getlist",
"tests/test_config.py::TestIniParser::test_getdict",
"tests/test_config.py::TestIniParser::test_getstring_environment_substitution",
"tests/test_config.py::TestIniParser::test_getstring_environment_substitution_with_default",
"tests/test_config.py::TestIniParser::test_value_matches_section_substituion",
"tests/test_config.py::TestIniParser::test_value_doesn_match_section_substitution",
"tests/test_config.py::TestIniParser::test_getstring_other_section_substitution",
"tests/test_config.py::TestIniParser::test_argvlist",
"tests/test_config.py::TestIniParser::test_argvlist_windows_escaping",
"tests/test_config.py::TestIniParser::test_argvlist_multiline",
"tests/test_config.py::TestIniParser::test_argvlist_quoting_in_command",
"tests/test_config.py::TestIniParser::test_argvlist_comment_after_command",
"tests/test_config.py::TestIniParser::test_argvlist_command_contains_hash",
"tests/test_config.py::TestIniParser::test_argvlist_positional_substitution",
"tests/test_config.py::TestIniParser::test_argvlist_quoted_posargs",
"tests/test_config.py::TestIniParser::test_argvlist_posargs_with_quotes",
"tests/test_config.py::TestIniParser::test_positional_arguments_are_only_replaced_when_standing_alone",
"tests/test_config.py::TestIniParser::test_posargs_are_added_escaped_issue310",
"tests/test_config.py::TestIniParser::test_substitution_with_multiple_words",
"tests/test_config.py::TestIniParser::test_getargv",
"tests/test_config.py::TestIniParser::test_getpath",
"tests/test_config.py::TestIniParserPrefix::test_basic_section_access",
"tests/test_config.py::TestIniParserPrefix::test_fallback_sections",
"tests/test_config.py::TestIniParserPrefix::test_value_matches_prefixed_section_substituion",
"tests/test_config.py::TestIniParserPrefix::test_value_doesn_match_prefixed_section_substitution",
"tests/test_config.py::TestIniParserPrefix::test_other_section_substitution",
"tests/test_config.py::TestConfigTestEnv::test_commentchars_issue33",
"tests/test_config.py::TestConfigTestEnv::test_defaults",
"tests/test_config.py::TestConfigTestEnv::test_sitepackages_switch",
"tests/test_config.py::TestConfigTestEnv::test_installpkg_tops_develop",
"tests/test_config.py::TestConfigTestEnv::test_specific_command_overrides",
"tests/test_config.py::TestConfigTestEnv::test_whitelist_externals",
"tests/test_config.py::TestConfigTestEnv::test_changedir",
"tests/test_config.py::TestConfigTestEnv::test_ignore_errors",
"tests/test_config.py::TestConfigTestEnv::test_envbindir",
"tests/test_config.py::TestConfigTestEnv::test_envbindir_jython[jython]",
"tests/test_config.py::TestConfigTestEnv::test_envbindir_jython[pypy]",
"tests/test_config.py::TestConfigTestEnv::test_envbindir_jython[pypy3]",
"tests/test_config.py::TestConfigTestEnv::test_passenv_with_factor",
"tests/test_config.py::TestConfigTestEnv::test_passenv_from_global_env",
"tests/test_config.py::TestConfigTestEnv::test_passenv_glob_from_global_env",
"tests/test_config.py::TestConfigTestEnv::test_changedir_override",
"tests/test_config.py::TestConfigTestEnv::test_install_command_setting",
"tests/test_config.py::TestConfigTestEnv::test_install_command_must_contain_packages",
"tests/test_config.py::TestConfigTestEnv::test_install_command_substitutions",
"tests/test_config.py::TestConfigTestEnv::test_pip_pre",
"tests/test_config.py::TestConfigTestEnv::test_pip_pre_cmdline_override",
"tests/test_config.py::TestConfigTestEnv::test_simple",
"tests/test_config.py::TestConfigTestEnv::test_substitution_error",
"tests/test_config.py::TestConfigTestEnv::test_substitution_defaults",
"tests/test_config.py::TestConfigTestEnv::test_substitution_notfound_issue246",
"tests/test_config.py::TestConfigTestEnv::test_substitution_positional",
"tests/test_config.py::TestConfigTestEnv::test_substitution_noargs_issue240",
"tests/test_config.py::TestConfigTestEnv::test_substitution_double",
"tests/test_config.py::TestConfigTestEnv::test_posargs_backslashed_or_quoted",
"tests/test_config.py::TestConfigTestEnv::test_rewrite_posargs",
"tests/test_config.py::TestConfigTestEnv::test_rewrite_simple_posargs",
"tests/test_config.py::TestConfigTestEnv::test_take_dependencies_from_other_testenv[envlist0-deps0]",
"tests/test_config.py::TestConfigTestEnv::test_take_dependencies_from_other_testenv[envlist1-deps1]",
"tests/test_config.py::TestConfigTestEnv::test_take_dependencies_from_other_section",
"tests/test_config.py::TestConfigTestEnv::test_multilevel_substitution",
"tests/test_config.py::TestConfigTestEnv::test_recursive_substitution_cycle_fails",
"tests/test_config.py::TestConfigTestEnv::test_single_value_from_other_secton",
"tests/test_config.py::TestConfigTestEnv::test_factors",
"tests/test_config.py::TestConfigTestEnv::test_factor_ops",
"tests/test_config.py::TestConfigTestEnv::test_default_factors",
"tests/test_config.py::TestConfigTestEnv::test_factors_in_boolean",
"tests/test_config.py::TestConfigTestEnv::test_factors_in_setenv",
"tests/test_config.py::TestConfigTestEnv::test_factor_use_not_checked",
"tests/test_config.py::TestConfigTestEnv::test_factors_groups_touch",
"tests/test_config.py::TestConfigTestEnv::test_period_in_factor",
"tests/test_config.py::TestConfigTestEnv::test_ignore_outcome",
"tests/test_config.py::TestGlobalOptions::test_notest",
"tests/test_config.py::TestGlobalOptions::test_verbosity",
"tests/test_config.py::TestGlobalOptions::test_substitution_jenkins_default",
"tests/test_config.py::TestGlobalOptions::test_substitution_jenkins_context",
"tests/test_config.py::TestGlobalOptions::test_sdist_specification",
"tests/test_config.py::TestGlobalOptions::test_env_selection",
"tests/test_config.py::TestGlobalOptions::test_py_venv",
"tests/test_config.py::TestGlobalOptions::test_default_environments",
"tests/test_config.py::TestGlobalOptions::test_envlist_expansion",
"tests/test_config.py::TestGlobalOptions::test_envlist_cross_product",
"tests/test_config.py::TestGlobalOptions::test_envlist_multiline",
"tests/test_config.py::TestGlobalOptions::test_minversion",
"tests/test_config.py::TestGlobalOptions::test_skip_missing_interpreters_true",
"tests/test_config.py::TestGlobalOptions::test_skip_missing_interpreters_false",
"tests/test_config.py::TestGlobalOptions::test_defaultenv_commandline",
"tests/test_config.py::TestGlobalOptions::test_defaultenv_partial_override",
"tests/test_config.py::TestHashseedOption::test_default",
"tests/test_config.py::TestHashseedOption::test_passing_integer",
"tests/test_config.py::TestHashseedOption::test_passing_string",
"tests/test_config.py::TestHashseedOption::test_passing_empty_string",
"tests/test_config.py::TestHashseedOption::test_setenv",
"tests/test_config.py::TestHashseedOption::test_noset",
"tests/test_config.py::TestHashseedOption::test_noset_with_setenv",
"tests/test_config.py::TestHashseedOption::test_one_random_hashseed",
"tests/test_config.py::TestHashseedOption::test_setenv_in_one_testenv",
"tests/test_config.py::TestSetenv::test_getdict_lazy",
"tests/test_config.py::TestSetenv::test_getdict_lazy_update",
"tests/test_config.py::TestSetenv::test_setenv_uses_os_environ",
"tests/test_config.py::TestSetenv::test_setenv_default_os_environ",
"tests/test_config.py::TestSetenv::test_setenv_uses_other_setenv",
"tests/test_config.py::TestSetenv::test_setenv_recursive_direct",
"tests/test_config.py::TestSetenv::test_setenv_overrides",
"tests/test_config.py::TestSetenv::test_setenv_with_envdir_and_basepython",
"tests/test_config.py::TestSetenv::test_setenv_ordering_1",
"tests/test_config.py::TestSetenv::test_setenv_cross_section_subst_issue294",
"tests/test_config.py::TestSetenv::test_setenv_cross_section_subst_twice",
"tests/test_config.py::TestSetenv::test_setenv_cross_section_mixed",
"tests/test_config.py::TestIndexServer::test_indexserver",
"tests/test_config.py::TestIndexServer::test_parse_indexserver",
"tests/test_config.py::TestIndexServer::test_multiple_homedir_relative_local_indexservers",
"tests/test_config.py::TestConfigConstSubstitutions::test_replace_pathsep_unix[:]",
"tests/test_config.py::TestConfigConstSubstitutions::test_replace_pathsep_unix[;]",
"tests/test_config.py::TestConfigConstSubstitutions::test_pathsep_regex",
"tests/test_config.py::TestParseEnv::test_parse_recreate",
"tests/test_config.py::test_env_spec[-e",
"tests/test_config.py::TestCommandParser::test_command_parser_for_word",
"tests/test_config.py::TestCommandParser::test_command_parser_for_posargs",
"tests/test_config.py::TestCommandParser::test_command_parser_for_multiple_words",
"tests/test_config.py::TestCommandParser::test_command_parser_for_substitution_with_spaces",
"tests/test_config.py::TestCommandParser::test_command_parser_with_complex_word_set",
"tests/test_config.py::TestCommandParser::test_command_with_runs_of_whitespace",
"tests/test_config.py::TestCommandParser::test_command_with_split_line_in_subst_arguments",
"tests/test_config.py::TestCommandParser::test_command_parsing_for_issue_10"
]
| []
| MIT License | 1,099 | [
"doc/config.txt",
"tox/config.py"
]
| [
"doc/config.txt",
"tox/config.py"
]
|
|
jupyterhub__kubespawner-34 | dc41368f61d9916fa2972b6c0a944fc2a0a66b01 | 2017-03-19 01:06:56 | dc41368f61d9916fa2972b6c0a944fc2a0a66b01 | diff --git a/kubespawner/objects.py b/kubespawner/objects.py
index 0d35428..0564574 100644
--- a/kubespawner/objects.py
+++ b/kubespawner/objects.py
@@ -6,6 +6,8 @@ def make_pod_spec(
image_spec,
image_pull_policy,
image_pull_secret,
+ port,
+ cmd,
run_as_uid,
fs_gid,
env,
@@ -34,6 +36,10 @@ def make_pod_spec(
- image_pull_secret:
Image pull secret - Default is None -- set to your secret name to pull
from private docker registry.
+ - port:
+ Port the notebook server is going to be listening on
+ - cmd:
+ The command used to execute the singleuser server.
- run_as_uid:
The UID used to run single-user pods. The default is to run as the user
specified in the Dockerfile, if this is set to None.
@@ -87,9 +93,10 @@ def make_pod_spec(
{
'name': 'notebook',
'image': image_spec,
+ 'command': cmd,
'imagePullPolicy': image_pull_policy,
'ports': [{
- 'containerPort': 8888,
+ 'containerPort': port,
}],
'resources': {
'requests': {
diff --git a/kubespawner/spawner.py b/kubespawner/spawner.py
index a4e1919..d36f635 100644
--- a/kubespawner/spawner.py
+++ b/kubespawner/spawner.py
@@ -10,7 +10,7 @@ import string
from urllib.parse import urlparse, urlunparse
from tornado import gen
-from tornado.curl_httpclient import CurlAsyncHTTPClient
+from tornado.httpclient import AsyncHTTPClient
from tornado.httpclient import HTTPError
from traitlets import Unicode, List, Integer, Union
from jupyterhub.spawner import Spawner
@@ -28,8 +28,13 @@ class KubeSpawner(Spawner):
super().__init__(*args, **kwargs)
# By now, all the traitlets have been set, so we can use them to compute
# other attributes
- # FIXME: Make this param tuneable?
- self.httpclient = CurlAsyncHTTPClient(max_clients=64)
+ # Use curl HTTPClient if available, else fall back to Simple one
+ try:
+ from tornado.curl_httpclient import CurlAsyncHTTPClient
+ self.httpclient = CurlAsyncHTTPClient(max_clients=64)
+ except ImportError:
+ from tornado.simple_httpclient import SimpleAsyncHTTPClient
+ self.httpclient = SimpleAsyncHTTPClient(max_clients=64)
# FIXME: Support more than just kubeconfig
self.request = request_maker()
self.pod_name = self._expand_user_properties(self.pod_name_template)
@@ -44,6 +49,10 @@ class KubeSpawner(Spawner):
else:
self.accessible_hub_api_url = self.hub.api_url
+ if self.port == 0:
+ # Our default port is 8888
+ self.port = 8888
+
namespace = Unicode(
config=True,
help="""
@@ -67,6 +76,15 @@ class KubeSpawner(Spawner):
return f.read().strip()
return 'default'
+ ip = Unicode('0.0.0.0',
+ help="""
+ The IP address (or hostname) the single-user server should listen on.
+
+ We override this from the parent so we can set a more sane default for
+ the Kubernetes setup.
+ """
+ ).tag(config=True)
+
pod_name_template = Unicode(
'jupyter-{username}-{userid}',
config=True,
@@ -428,6 +446,8 @@ class KubeSpawner(Spawner):
self.singleuser_image_spec,
self.singleuser_image_pull_policy,
self.singleuser_image_pull_secrets,
+ self.port,
+ self.cmd + self.get_args(),
singleuser_uid,
singleuser_fs_gid,
self.get_env(),
@@ -576,9 +596,11 @@ class KubeSpawner(Spawner):
except HTTPError as e:
if e.code != 409:
# We only want to handle 409 conflict errors
+ self.log.exception("Failed for %s", json.dumps(pod_manifest))
raise
self.log.info('Found existing pod %s, attempting to kill', self.pod_name)
yield self.stop(True)
+
self.log.info('Killed pod %s, will try starting singleuser pod again', self.pod_name)
else:
raise Exception(
@@ -589,7 +611,7 @@ class KubeSpawner(Spawner):
if data is not None and self.is_pod_running(data):
break
yield gen.sleep(1)
- return (data['status']['podIP'], 8888)
+ return (data['status']['podIP'], self.port)
@gen.coroutine
def stop(self, now=False):
@@ -621,13 +643,19 @@ class KubeSpawner(Spawner):
def _env_keep_default(self):
return []
- def get_env(self):
- env = super(KubeSpawner, self).get_env()
- env.update({
- 'JPY_USER': self.user.name,
- 'JPY_COOKIE_NAME': self.user.server.cookie_name,
- 'JPY_BASE_URL': self.user.server.base_url,
- 'JPY_HUB_PREFIX': self.hub.server.base_url,
- 'JPY_HUB_API_URL': self.accessible_hub_api_url
- })
- return env
+ def get_args(self):
+ args = super(KubeSpawner, self).get_args()
+
+ # HACK: we wanna replace --hub-api-url=self.hub.api_url with
+ # self.accessible_hub_api_url. This is required in situations where
+ # the IP the hub is listening on (such as 0.0.0.0) is not the IP where
+ # it can be reached by the pods (such as the service IP used for the hub!)
+ # FIXME: Make this better?
+ print(args)
+ to_replace = '--hub-api-url="%s"' % (self.hub.api_url)
+ print(to_replace)
+ for i in range(len(args)):
+ if args[i] == to_replace:
+ args[i] = '--hub-api-url="%s"' % (self.accessible_hub_api_url)
+ break
+ return args
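The patch above threads the spawner's `cmd` (plus the arguments from `get_args()`) into the pod spec's `command` field, which makes the container command configurable like in other JupyterHub spawners. A minimal, hypothetical `jupyterhub_config.py` sketch — the image name and script path are assumptions taken from the issue below, not part of the patch:

```python
# jupyterhub_config.py -- hypothetical sketch, not part of the patch.
# `c` is the config object that JupyterHub injects into this file.
c.JupyterHub.spawner_class = 'kubespawner.KubeSpawner'

# Assumed docker-stacks image name, for illustration only.
c.KubeSpawner.singleuser_image_spec = 'jupyter/scipy-notebook:latest'

# With the change above, Spawner.cmd plus the generated arguments are
# passed as the container's `command`, so the docker-stacks entrypoint
# can be selected here.
c.KubeSpawner.cmd = ['/usr/local/bin/singleuser.sh']
```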
| Specify container command
Is it possible to specify the container command via config? To use the docker stacks, command must be set to `/usr/local/bin/singleuser.sh`, but I can't see how to do it here. In DockerSpawner, it's passed to `docker.create` | jupyterhub/kubespawner | diff --git a/tests/test_objects.py b/tests/test_objects.py
index 4b09347..d8028a5 100644
--- a/tests/test_objects.py
+++ b/tests/test_objects.py
@@ -14,6 +14,8 @@ def test_make_simplest_pod():
env={},
volumes=[],
volume_mounts=[],
+ cmd=['jupyterhub-singleuser'],
+ port=8888,
cpu_limit=None,
cpu_guarantee=None,
mem_limit=None,
@@ -35,6 +37,7 @@ def test_make_simplest_pod():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
+ "command": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -68,6 +71,8 @@ def test_make_pod_with_image_pull_secrets():
env={},
volumes=[],
volume_mounts=[],
+ cmd=['jupyterhub-singleuser'],
+ port=8888,
cpu_limit=None,
cpu_guarantee=None,
mem_limit=None,
@@ -91,6 +96,7 @@ def test_make_pod_with_image_pull_secrets():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
+ "command": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -124,6 +130,8 @@ def test_set_pod_uid_fs_gid():
env={},
volumes=[],
volume_mounts=[],
+ cmd=['jupyterhub-singleuser'],
+ port=8888,
cpu_limit=None,
cpu_guarantee=None,
mem_limit=None,
@@ -148,6 +156,7 @@ def test_set_pod_uid_fs_gid():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
+ "command": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -183,6 +192,8 @@ def test_make_pod_resources_all():
volume_mounts=[],
cpu_limit=2,
cpu_guarantee=1,
+ cmd=['jupyterhub-singleuser'],
+ port=8888,
mem_limit='1Gi',
mem_guarantee='512Mi',
image_pull_policy='IfNotPresent',
@@ -202,6 +213,7 @@ def test_make_pod_resources_all():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
+ "command": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -237,6 +249,8 @@ def test_make_pod_with_env():
},
volumes=[],
volume_mounts=[],
+ cmd=['jupyterhub-singleuser'],
+ port=8888,
cpu_limit=None,
cpu_guarantee=None,
mem_limit=None,
@@ -258,6 +272,7 @@ def test_make_pod_with_env():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
+ "command": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 2
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"configurable-http-proxy"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-dev libcurl4-openssl-dev libssl-dev"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic==1.15.2
annotated-types==0.7.0
arrow==1.3.0
async-generator==1.10
attrs==25.3.0
certifi==2025.1.31
certipy==0.2.2
cffi==1.17.1
charset-normalizer==3.4.1
click==8.1.8
configurable-http-proxy==0.3.0
cryptography==44.0.2
exceptiongroup==1.2.2
fqdn==1.5.1
greenlet==3.1.1
idna==3.10
importlib_metadata==8.6.1
iniconfig==2.1.0
isoduration==20.11.0
Jinja2==3.1.6
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter-events==0.12.0
jupyterhub==5.2.1
-e git+https://github.com/jupyterhub/kubespawner.git@dc41368f61d9916fa2972b6c0a944fc2a0a66b01#egg=jupyterhub_kubespawner
Mako==1.3.9
MarkupSafe==3.0.2
oauthlib==3.2.2
packaging==24.2
pamela==1.2.0
pluggy==1.5.0
prometheus_client==0.21.1
pycparser==2.22
pycurl==7.45.6
pydantic==2.11.1
pydantic_core==2.33.0
pytest==8.3.5
python-dateutil==2.9.0.post0
python-json-logger==3.3.0
PyYAML==6.0.2
referencing==0.36.2
requests==2.32.3
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rpds-py==0.24.0
six==1.17.0
SQLAlchemy==2.0.40
tomli==2.2.1
tornado==6.4.2
traitlets==5.14.3
types-python-dateutil==2.9.0.20241206
typing-inspection==0.4.0
typing_extensions==4.13.0
uri-template==1.3.0
urllib3==2.3.0
webcolors==24.11.1
zipp==3.21.0
| name: kubespawner
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alembic==1.15.2
- annotated-types==0.7.0
- arrow==1.3.0
- async-generator==1.10
- attrs==25.3.0
- certifi==2025.1.31
- certipy==0.2.2
- cffi==1.17.1
- charset-normalizer==3.4.1
- click==8.1.8
- configurable-http-proxy==0.3.0
- cryptography==44.0.2
- exceptiongroup==1.2.2
- fqdn==1.5.1
- greenlet==3.1.1
- idna==3.10
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jinja2==3.1.6
- jsonpointer==3.0.0
- jsonschema==4.23.0
- jsonschema-specifications==2024.10.1
- jupyter-events==0.12.0
- jupyterhub==5.2.1
- mako==1.3.9
- markupsafe==3.0.2
- oauthlib==3.2.2
- packaging==24.2
- pamela==1.2.0
- pluggy==1.5.0
- prometheus-client==0.21.1
- pycparser==2.22
- pycurl==7.45.6
- pydantic==2.11.1
- pydantic-core==2.33.0
- pytest==8.3.5
- python-dateutil==2.9.0.post0
- python-json-logger==3.3.0
- pyyaml==6.0.2
- referencing==0.36.2
- requests==2.32.3
- rfc3339-validator==0.1.4
- rfc3986-validator==0.1.1
- rpds-py==0.24.0
- six==1.17.0
- sqlalchemy==2.0.40
- tomli==2.2.1
- tornado==6.4.2
- traitlets==5.14.3
- types-python-dateutil==2.9.0.20241206
- typing-extensions==4.13.0
- typing-inspection==0.4.0
- uri-template==1.3.0
- urllib3==2.3.0
- webcolors==24.11.1
- zipp==3.21.0
prefix: /opt/conda/envs/kubespawner
| [
"tests/test_objects.py::test_make_simplest_pod",
"tests/test_objects.py::test_make_pod_with_image_pull_secrets",
"tests/test_objects.py::test_set_pod_uid_fs_gid",
"tests/test_objects.py::test_make_pod_resources_all",
"tests/test_objects.py::test_make_pod_with_env"
]
| []
| [
"tests/test_objects.py::test_make_pvc_simple",
"tests/test_objects.py::test_make_resources_all"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,100 | [
"kubespawner/objects.py",
"kubespawner/spawner.py"
]
| [
"kubespawner/objects.py",
"kubespawner/spawner.py"
]
|
|
zhmcclient__python-zhmcclient-218 | 560cdaa7fda0f63688efa192cbfe711324a194ea | 2017-03-20 15:20:38 | 63bfc356570b865f4eac1d6a37c62e7b018520fc | coveralls:
[Coverage Status](https://coveralls.io/builds/10680542)
Coverage decreased (-0.1%) to 88.626% when pulling **f0b45f6a790af1cb7778ff36a9a8a73e8a934ddb on andy/fix-mock** into **560cdaa7fda0f63688efa192cbfe711324a194ea on master**.
| diff --git a/docs/changes.rst b/docs/changes.rst
index c6b5f3b..4abc74c 100644
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -33,8 +33,32 @@ Released: not yet
* Added WWPN support in mocking framework (issue #212).
+* Fixed error in mock support where the `operation_timeout` argument to
+ `FakedSession.post()` was missing.
+
+* Fixed a bug in the unit test for the mock support, that caused incomplete
+ expected results not to be surfaced, and fixed the incomplete testcases.
+
**Enhancements:**
+* Improved the mock support by adding the typical attributes of its superclass
+ `FakedBaseResource` to the `FakedHmc` class.
+
+* Improved the mock support by adding `__repr__()` methods to all `Faked*`
+ classes that return an object representation suitable for debugging.
+
+* In the mock support, the following resource properties are now auto-set if
+ not specified in the input properties:
+ - Cpc:
+ - 'dpm-enabled' is auto-set to `False`, if not specified.
+ - 'is-ensemble-member' is auto-set to `False`, if not specified.
+ - 'status' is auto-set, if not specified, as follows: If the
+ 'dpm-enabled' property is `True`, it is set to 'active';
+ otherwise it is set to 'operating'.
+ - Partition: 'status' is auto-set to 'stopped', if not specified.
+ - Lpar: 'status' is auto-set to 'not-activated', if not specified.
+ - Adapter: 'status' is auto-set to 'active', if not specified.
+
**Known Issues:**
* See `list of open issues`_.
diff --git a/zhmcclient_mock/_hmc.py b/zhmcclient_mock/_hmc.py
index 450855e..74b45b0 100644
--- a/zhmcclient_mock/_hmc.py
+++ b/zhmcclient_mock/_hmc.py
@@ -25,6 +25,7 @@ try:
except ImportError:
from ordereddict import OrderedDict
import six
+import pprint
from ._idpool import IdPool
@@ -48,18 +49,47 @@ class FakedBaseResource(object):
"""
def __init__(self, manager, properties):
- self._manager = manager
- self._properties = properties
+ self._manager = manager # May be None
+ self._properties = properties.copy() if properties is not None \
+ else None
- if self.manager.oid_prop not in self.properties:
- new_oid = self.manager._new_oid()
- self.properties[self.manager.oid_prop] = new_oid
- self._oid = self.properties[self.manager.oid_prop]
+ if self.manager and self.properties:
- if self.manager.uri_prop not in self.properties:
- new_uri = self.manager.base_uri + '/' + self.oid
- self.properties[self.manager.uri_prop] = new_uri
- self._uri = self.properties[self.manager.uri_prop]
+ if self.manager.oid_prop not in self.properties:
+ new_oid = self.manager._new_oid()
+ self.properties[self.manager.oid_prop] = new_oid
+ self._oid = self.properties[self.manager.oid_prop]
+
+ if self.manager.uri_prop not in self.properties:
+ new_uri = self.manager.base_uri + '/' + self.oid
+ self.properties[self.manager.uri_prop] = new_uri
+ self._uri = self.properties[self.manager.uri_prop]
+
+ else:
+ self._oid = None
+ self._uri = None
+
+ def __repr__(self):
+ """
+ Return a string with the state of this faked resource, for debug
+ purposes.
+ """
+ ret = (
+ "{classname} at 0x{id:08x} (\n"
+ " _manager = {manager_classname} at 0x{manager_id:08x}\n"
+ " _manager._parent._uri = {parent_uri!r}\n"
+ " _uri = {_uri!r}\n"
+ " _properties = {_properties}\n"
+ ")".format(
+ classname=self.__class__.__name__,
+ id=id(self),
+ manager_classname=self._manager.__class__.__name__,
+ manager_id=id(self._manager),
+ parent_uri=self._manager.parent.uri,
+ _uri=self._uri,
+ _properties=pprint.pformat(self.properties, indent=4),
+ ))
+ return ret
@property
def manager(self):
@@ -122,6 +152,35 @@ class FakedBaseManager(object):
self._uri_prop = uri_prop
self._resources = OrderedDict() # Resource objects, by object ID
+ def __repr__(self):
+ """
+ Return a string with the state of this faked manager, for debug
+ purposes.
+ """
+ ret = (
+ "{classname} at 0x{id:08x} (\n"
+ " _hmc = {hmc_classname} at 0x{hmc_id:08x}\n"
+ " _parent = {parent_classname} at 0x{parent_id:08x}\n"
+ " _resource_class = {_resource_class!r}\n"
+ " _base_uri = {_base_uri!r}\n"
+ " _oid_prop = {_oid_prop!r}\n"
+ " _uri_prop = {_uri_prop!r}\n"
+ " _resources = {_resources}\n"
+ ")".format(
+ classname=self.__class__.__name__,
+ id=id(self),
+ hmc_classname=self._hmc.__class__.__name__,
+ hmc_id=id(self._hmc),
+ parent_classname=self._parent.__class__.__name__,
+ parent_id=id(self._parent),
+ _resource_class=self._resource_class,
+ _base_uri=self._base_uri,
+ _oid_prop=self._oid_prop,
+ _uri_prop=self._uri_prop,
+ _resources=pprint.pformat(self._resources, indent=4),
+ ))
+ return ret
+
@property
def hmc(self):
"""
@@ -260,12 +319,34 @@ class FakedHmc(FakedBaseResource):
"""
def __init__(self, hmc_name, hmc_version, api_version):
+ super(FakedHmc, self).__init__(manager=None, properties=None)
self.hmc_name = hmc_name
self.hmc_version = hmc_version
self.api_version = api_version
self.cpcs = FakedCpcManager(hmc=self, client=self)
self._resources = {} # by URI
+ def __repr__(self):
+ """
+ Return a string with the state of this faked HMC, for debug purposes.
+ """
+ ret = (
+ "FakedHmc at 0x{id:08x} (\n"
+ " hmc_name = {hmc_name!r}\n"
+ " hmc_version = {hmc_version!r}\n"
+ " api_version = {api_version!r}\n"
+ " cpcs = {cpcs!r}\n"
+ " _resources = {_resources!r}\n"
+ ")".format(
+ id=id(self),
+ hmc_name=self.hmc_name,
+ hmc_version=self.hmc_version,
+ api_version=self.api_version,
+ cpcs=self.cpcs,
+ _resources=self._resources,
+ ))
+ return ret
+
def add_resources(self, resources):
"""
Add faked resources to the faked HMC, from the provided resource
@@ -482,13 +563,14 @@ class FakedAdapterManager(FakedBaseManager):
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
+ * 'status' is auto-set to 'active', if not specified.
* 'adapter-family' or 'type' is required to be specified, in order
to determine whether the adapter is a network or storage adapter.
* 'adapter-family' is auto-set based upon 'type', if not specified.
- * 'network-port-uris' is auto-set to an empty list, if not set,
- for network adapters.
- * 'storage-port-uris' is auto-set to an empty list, if not set,
- for storage adapters.
+ * For network adapters, 'network-port-uris' is auto-set to an empty
+ list, if not specified.
+ * For storage adapters, 'storage-port-uris' is auto-set to an empty
+ list, if not specified.
Returns:
:class:`~zhmcclient_mock.FakedAdapter`: The faked Adapter resource.
@@ -509,35 +591,35 @@ class FakedAdapter(FakedBaseResource):
super(FakedAdapter, self).__init__(
manager=manager,
properties=properties)
- if 'adapter-family' in properties:
- family = properties['adapter-family']
+ if 'adapter-family' in self.properties:
+ family = self.properties['adapter-family']
if family in ('osa', 'roce', 'hipersockets'):
self._adapter_kind = 'network'
elif family in ('ficon',):
self._adapter_kind = 'storage'
else:
self._adapter_kind = 'other'
- elif 'type' in properties:
+ elif 'type' in self.properties:
# because 'type' is more specific than 'adapter-family', we can
# auto-set 'adapter-family' from 'type'.
- type_ = properties['type']
+ type_ = self.properties['type']
if type_ in ('osd', 'osm'):
- self._properties['adapter-family'] = 'osa'
+ self.properties['adapter-family'] = 'osa'
self._adapter_kind = 'network'
elif type_ == 'roce':
- self._properties['adapter-family'] = 'roce'
+ self.properties['adapter-family'] = 'roce'
self._adapter_kind = 'network'
elif type_ == 'hipersockets':
- self._properties['adapter-family'] = 'hipersockets'
+ self.properties['adapter-family'] = 'hipersockets'
self._adapter_kind = 'network'
elif type_ == 'fcp':
- self._properties['adapter-family'] = 'ficon'
+ self.properties['adapter-family'] = 'ficon'
self._adapter_kind = 'storage'
elif type_ == 'crypto':
- self._properties['adapter-family'] = 'crypto'
+ self.properties['adapter-family'] = 'crypto'
self._adapter_kind = 'other'
elif type_ == 'zedc':
- self._properties['adapter-family'] = 'accelerator'
+ self.properties['adapter-family'] = 'accelerator'
self._adapter_kind = 'other'
else:
raise ValueError("FakedAdapter with object-id=%s has an "
@@ -549,14 +631,16 @@ class FakedAdapter(FakedBaseResource):
self.oid)
if self.adapter_kind == 'network':
if 'network-port-uris' not in self.properties:
- self._properties['network-port-uris'] = []
+ self.properties['network-port-uris'] = []
self._ports = FakedPortManager(hmc=manager.hmc, adapter=self)
elif self.adapter_kind == 'storage':
if 'storage-port-uris' not in self.properties:
- self._properties['storage-port-uris'] = []
+ self.properties['storage-port-uris'] = []
self._ports = FakedPortManager(hmc=manager.hmc, adapter=self)
else:
self._ports = None
+ if 'status' not in self.properties:
+ self.properties['status'] = 'active'
@property
def ports(self):
@@ -616,6 +700,11 @@ class FakedCpcManager(FakedBaseManager):
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
+ * 'dpm-enabled' is auto-set to `False`, if not specified.
+ * 'is-ensemble-member' is auto-set to `False`, if not specified.
+ * 'status' is auto-set, if not specified, as follows: If the
+ 'dpm-enabled' property is `True`, it is set to 'active';
+ otherwise it is set to 'operating'.
Returns:
:class:`~zhmcclient_mock.FakedCpc`: The faked CPC resource.
@@ -647,15 +736,24 @@ class FakedCpc(FakedBaseResource):
hmc=manager.hmc, cpc=self, profile_type='image')
self._load_activation_profiles = FakedActivationProfileManager(
hmc=manager.hmc, cpc=self, profile_type='load')
+ if 'dpm-enabled' not in self.properties:
+ self.properties['dpm-enabled'] = False
+ if 'is-ensemble-member' not in self.properties:
+ self.properties['is-ensemble-member'] = False
+ if 'status' not in self.properties:
+ if self.dpm_enabled:
+ self.properties['status'] = 'active'
+ else:
+ self.properties['status'] = 'operating'
@property
def dpm_enabled(self):
"""
bool: Indicates whether this CPC is in DPM mode.
- This is based upon the 'dpm-enabled' property and defaults to `False`.
+ This returns the value of the 'dpm-enabled' property.
"""
- return self.properties.get('dpm-enabled', False)
+ return self.properties['dpm-enabled']
@property
def lpars(self):
@@ -853,6 +951,7 @@ class FakedLparManager(FakedBaseManager):
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
+ * 'status' is auto-set to 'not-activated', if not specified.
Returns:
:class:`~zhmcclient_mock.FakedLpar`: The faked LPAR resource.
@@ -873,6 +972,8 @@ class FakedLpar(FakedBaseResource):
super(FakedLpar, self).__init__(
manager=manager,
properties=properties)
+ if 'status' not in self.properties:
+ self.properties['status'] = 'not-activated'
class FakedNicManager(FakedBaseManager):
@@ -1018,6 +1119,7 @@ class FakedPartitionManager(FakedBaseManager):
specified.
* 'virtual-function-uris' will be auto-generated as an empty array,
if not specified.
+ * 'status' is auto-set to 'stopped', if not specified.
Returns:
:class:`~zhmcclient_mock.FakedPartition`: The faked Partition
@@ -1050,6 +1152,8 @@ class FakedPartition(FakedBaseResource):
self.properties['nic-uris'] = []
if 'virtual-function-uris' not in self.properties:
self.properties['virtual-function-uris'] = []
+ if 'status' not in self.properties:
+ self.properties['status'] = 'stopped'
self._nics = FakedNicManager(hmc=manager.hmc, partition=self)
self._hbas = FakedHbaManager(hmc=manager.hmc, partition=self)
self._virtual_functions = FakedVirtualFunctionManager(
diff --git a/zhmcclient_mock/_session.py b/zhmcclient_mock/_session.py
index 0471caa..140e70d 100644
--- a/zhmcclient_mock/_session.py
+++ b/zhmcclient_mock/_session.py
@@ -126,7 +126,7 @@ class FakedSession(zhmcclient.Session):
raise zhmcclient.HTTPError(exc.response())
def post(self, uri, body=None, logon_required=True,
- wait_for_completion=True):
+ wait_for_completion=True, operation_timeout=None):
"""
Perform the HTTP POST method against the resource identified by a URI,
using a provided request body, on the faked HMC.
@@ -189,6 +189,18 @@ class FakedSession(zhmcclient.Session):
HTTP POST method, regardless of whether the operation is
synchronous or asynchronous.
+ operation_timeout (:term:`number`):
+ Timeout in seconds, when waiting for completion of an asynchronous
+ operation. The special value 0 means that no timeout is set. `None`
+ means that the default async operation timeout of the session is
+ used.
+
+ For `wait_for_completion=True`, a
+ :exc:`~zhmcclient.OperationTimeout` is raised when the timeout
+ expires.
+
+ For `wait_for_completion=False`, this parameter has no effect.
+
Returns:
:term:`json object`:
| zhmcclient_mock framework - partition start/stop works with a different partition object
### Actual behavior
A partition.delete() invocation followed by a query of the 'status' property on the Partition fails:
File "nova_dpm/virt/dpm/vm.py", line 315, in destroy
if (self.partition.properties['status'] == 'stopped'):
KeyError: 'status'
Placed some debug logs in _urihandler.py and vm.py to see if the partition object is the same, but it doesn't seem to be so...
###############destroy function debug logs in vm.py###################
destroy function
/api/partitions/1
140453452362448
#################logs from _urihandler.py############################
PartitionStopHandler
/api/partitions/1
140453452274384
stopped
{'virtual-function-uris': [], 'hba-uris': ['/api/partitions/1/hbas/1'], 'object-uri': '/api/partitions/1', 'initial-memory': 512, 'nic-uris': [], 'status': 'stopped', 'ifl-processors': 1, 'name': 'OpenStack-fakemini-38400000-8cf0-11bd-b23e-10b96e4ef00d', 'object-id': '1', 'description': 'Partition #1 in CPC #2'}
### Expected behavior
partition.delete() with zhmcclient_mock is expected to have invoked the mocking framework's PartitionStopHandler function in _urihandler.py, which is expected to add a new 'status' property set to the value "stopped".
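A minimal, self-contained sketch of that expected flow (resource names and initial property values are made up for illustration, and the mock setup API is assumed as documented):

```python
# Hypothetical reproduction sketch against the zhmcclient mock support.
import zhmcclient
import zhmcclient_mock

# Build a faked HMC with one DPM-mode CPC and one partition.
session = zhmcclient_mock.FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')
faked_cpc = session.hmc.cpcs.add({'name': 'cpc_1', 'dpm-enabled': True})
faked_cpc.partitions.add({'name': 'part_1', 'status': 'active'})

# Use the regular zhmcclient API against the faked session.
client = zhmcclient.Client(session)
partition = client.cpcs.find(name='cpc_1').partitions.find(name='part_1')

partition.stop()                    # should be routed to PartitionStopHandler
partition.pull_full_properties()
assert partition.properties['status'] == 'stopped'
```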
### Execution environment
zhmcclient_mock
* zhmcclient version:
* Operating system (type+version):
| zhmcclient/python-zhmcclient | diff --git a/tests/unit/zhmcclient_mock/test_hmc.py b/tests/unit/zhmcclient_mock/test_hmc.py
index cde9768..2a3f05d 100755
--- a/tests/unit/zhmcclient_mock/test_hmc.py
+++ b/tests/unit/zhmcclient_mock/test_hmc.py
@@ -20,6 +20,7 @@ Unit tests for _hmc module of the zhmcclient_mock package.
from __future__ import absolute_import, print_function
import unittest
+import re
from zhmcclient_mock._hmc import FakedHmc, \
FakedActivationProfileManager, FakedActivationProfile, \
@@ -31,7 +32,8 @@ from zhmcclient_mock._hmc import FakedHmc, \
FakedPartitionManager, FakedPartition, \
FakedPortManager, FakedPort, \
FakedVirtualFunctionManager, FakedVirtualFunction, \
- FakedVirtualSwitchManager, FakedVirtualSwitch
+ FakedVirtualSwitchManager, FakedVirtualSwitch, \
+ FakedBaseManager, FakedBaseResource
class FakedHmcTests(unittest.TestCase):
@@ -40,6 +42,45 @@ class FakedHmcTests(unittest.TestCase):
def setUp(self):
self.hmc = FakedHmc('fake-hmc', '2.13.1', '1.8')
+ def test_repr(self):
+
+ # The test approach is to check the repr() result for each attribute
+ # that is shown in the result, but leaving some flexibility in how that
+ # is formatted.
+
+ # Bring everything into one line, because regexp is line-oriented.
+ act_repr = repr(self.hmc).replace('\n', '\\n')
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*FakedHmc\s+at\s+0x{id:08x}\s+\(\\n.*'.
+ format(id=id(self.hmc)))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\shmc_name\s*=\s*{hmc_name!r}\\n.*'.
+ format(hmc_name=self.hmc.hmc_name))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\shmc_version\s*=\s*{hmc_version!r}\\n.*'.
+ format(hmc_version=self.hmc.hmc_version))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\sapi_version\s*=\s*{api_version!r}\\n.*'.
+ format(api_version=self.hmc.api_version))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\scpcs\s*=\s*FakedCpcManager\s.*')
+ # TODO: Check content of `cpcs`
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_resources\s*=\s.*')
+ # TODO: Check content of `_resources`
+
def test_hmc(self):
self.assertEqual(self.hmc.hmc_name, 'fake-hmc')
self.assertEqual(self.hmc.hmc_version, '2.13.1')
@@ -55,12 +96,15 @@ class FakedHmcTests(unittest.TestCase):
cpc1_in_props = {'name': 'cpc1'}
# the function to be tested:
- cpc1 = self.hmc.cpcs.add({'name': 'cpc1'})
+ cpc1 = self.hmc.cpcs.add(cpc1_in_props)
cpc1_out_props = cpc1_in_props.copy()
cpc1_out_props.update({
'object-id': cpc1.oid,
'object-uri': cpc1.uri,
+ 'dpm-enabled': False,
+ 'is-ensemble-member': False,
+ 'status': 'operating',
})
# the function to be tested:
@@ -83,6 +127,9 @@ class FakedHmcTests(unittest.TestCase):
cpc1_out_props.update({
'object-id': cpc1.oid,
'object-uri': cpc1.uri,
+ 'dpm-enabled': False,
+ 'is-ensemble-member': False,
+ 'status': 'operating',
})
cpc2_in_props = {'name': 'cpc2'}
@@ -94,6 +141,9 @@ class FakedHmcTests(unittest.TestCase):
cpc2_out_props.update({
'object-id': cpc2.oid,
'object-uri': cpc2.uri,
+ 'dpm-enabled': False,
+ 'is-ensemble-member': False,
+ 'status': 'operating',
})
# the function to be tested:
@@ -145,6 +195,9 @@ class FakedHmcTests(unittest.TestCase):
cpc1_out_props.update({
'object-id': cpc1.oid,
'object-uri': cpc1.uri,
+ 'dpm-enabled': False,
+ 'is-ensemble-member': False,
+ 'status': 'operating',
})
self.assertIsInstance(cpc1, FakedCpc)
self.assertEqual(cpc1.properties, cpc1_out_props)
@@ -153,22 +206,24 @@ class FakedHmcTests(unittest.TestCase):
cpc1_adapters = cpc1.adapters.list()
self.assertEqual(len(cpc1_adapters), 1)
-
adapter1 = cpc1_adapters[0]
+
+ adapter1_ports = adapter1.ports.list()
+
+ self.assertEqual(len(adapter1_ports), 1)
+ port1 = adapter1_ports[0]
+
adapter1_out_props = adapter1_in_props.copy()
adapter1_out_props.update({
'object-id': adapter1.oid,
'object-uri': adapter1.uri,
+ 'status': 'active',
+ 'network-port-uris': [port1.uri],
})
self.assertIsInstance(adapter1, FakedAdapter)
self.assertEqual(adapter1.properties, adapter1_out_props)
self.assertEqual(adapter1.manager, cpc1.adapters)
- adapter1_ports = adapter1.ports.list()
-
- self.assertEqual(len(adapter1_ports), 1)
-
- port1 = adapter1_ports[0]
port1_out_props = port1_in_props.copy()
port1_out_props.update({
'element-id': port1.oid,
@@ -179,6 +234,151 @@ class FakedHmcTests(unittest.TestCase):
self.assertEqual(port1.manager, adapter1.ports)
+class FakedBaseTests(unittest.TestCase):
+ """All tests for the FakedBaseManager and FakedBaseResource classes."""
+
+ def setUp(self):
+ self.hmc = FakedHmc('fake-hmc', '2.13.1', '1.8')
+ self.cpc1_in_props = {
+ # All properties that are otherwise defaulted (but with non-default
+ # values), plus 'name'.
+ 'object-id': '42',
+ 'object-uri': '/api/cpcs/42',
+ 'dpm-enabled': True,
+ 'is-ensemble-member': False,
+ 'status': 'service',
+ 'name': 'cpc1',
+ }
+ rd = {
+ 'cpcs': [
+ {
+ 'properties': self.cpc1_in_props,
+ },
+ ]
+ }
+ self.hmc.add_resources(rd)
+ self.cpc_manager = self.hmc.cpcs
+ self.cpc_resource = self.hmc.cpcs.list()[0]
+ self.cpc1_out_props = self.cpc1_in_props.copy()
+
+ def test_manager_attr(self):
+ """Test FakedBaseManager attributes."""
+
+ self.assertIsInstance(self.cpc_manager, FakedBaseManager)
+
+ self.assertEqual(self.cpc_manager.hmc, self.hmc)
+ self.assertEqual(self.cpc_manager.parent, self.hmc)
+ self.assertEqual(self.cpc_manager.resource_class, FakedCpc)
+ self.assertEqual(self.cpc_manager.base_uri, '/api/cpcs')
+ self.assertEqual(self.cpc_manager.oid_prop, 'object-id')
+ self.assertEqual(self.cpc_manager.uri_prop, 'object-uri')
+
+ def test_resource_attr(self):
+ """Test FakedBaseResource attributes."""
+
+ self.assertIsInstance(self.cpc_resource, FakedBaseResource)
+
+ self.assertEqual(self.cpc_resource.manager, self.cpc_manager)
+ self.assertEqual(self.cpc_resource.properties, self.cpc1_out_props)
+ self.assertEqual(self.cpc_resource.oid,
+ self.cpc1_out_props['object-id'])
+ self.assertEqual(self.cpc_resource.uri,
+ self.cpc1_out_props['object-uri'])
+
+ def test_manager_repr(self):
+ """Test FakedBaseManager.__repr__()."""
+
+ # The test approach is to check the repr() result for each attribute
+ # that is shown in the result, but leaving some flexibility in how that
+ # is formatted.
+
+ # Bring everything into one line, because regexp is line-oriented.
+ act_repr = repr(self.cpc_manager).replace('\n', '\\n')
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*{classname}\s+at\s+0x{id:08x}\s+\(\\n.*'.
+ format(classname=self.cpc_manager.__class__.__name__,
+ id=id(self.cpc_manager)))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_hmc\s*=\s*{hmc_classname}\s+at\s+0x{hmc_id:08x}\\n.*'.
+ format(hmc_classname=self.cpc_manager.hmc.__class__.__name__,
+ hmc_id=id(self.cpc_manager.hmc)))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_parent\s*=\s*{p_classname}\s+at\s+0x{p_id:08x}\\n.*'.
+ format(p_classname=self.cpc_manager.parent.__class__.__name__,
+ p_id=id(self.cpc_manager.parent)))
+
+ m = re.match(r'.*\s_resource_class\s*=\s*([^\\]+)\\n.*', act_repr)
+ if not m:
+ raise AssertionError("'_resource_class = ...' did not match "
+ "in: {!r}".format(act_repr))
+ act_resource_class = m.group(1)
+ self.assertEqual(act_resource_class,
+ repr(self.cpc_manager.resource_class))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_base_uri\s*=\s*{_base_uri!r}\\n.*'.
+ format(_base_uri=self.cpc_manager.base_uri))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_oid_prop\s*=\s*{_oid_prop!r}\\n.*'.
+ format(_oid_prop=self.cpc_manager.oid_prop))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_uri_prop\s*=\s*{_uri_prop!r}\\n.*'.
+ format(_uri_prop=self.cpc_manager.uri_prop))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_resources\s*=\s.*')
+ # TODO: Check content of `_resources`
+
+ def test_resource_repr(self):
+ """Test FakedBaseResource.__repr__()."""
+
+ # The test approach is to check the repr() result for each attribute
+ # that is shown in the result, but leaving some flexibility in how that
+ # is formatted.
+
+ # Bring everything into one line, because regexp is line-oriented.
+ act_repr = repr(self.cpc_resource).replace('\n', '\\n')
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*{classname}\s+at\s+0x{id:08x}\s+\(\\n.*'.
+ format(classname=self.cpc_resource.__class__.__name__,
+ id=id(self.cpc_resource)))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_manager\s*=\s*{m_classname}\s+at\s+0x{m_id:08x}\\n.*'.
+ format(m_classname=self.cpc_resource.manager.__class__.__name__,
+ m_id=id(self.cpc_resource.manager)))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_manager._parent._uri\s*=\s*{p_uri!r}\\n.*'.
+ format(p_uri=self.cpc_resource.manager.parent.uri))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_uri\s*=\s*{_uri!r}\\n.*'.
+ format(_uri=self.cpc_resource.uri))
+
+ self.assertRegexpMatches(
+ act_repr,
+ r'.*\s_properties\s*=\s.*')
+ # TODO: Check content of `_properties`
+
+
class FakedActivationProfileTests(unittest.TestCase):
"""All tests for the FakedActivationProfileManager and
FakedActivationProfile classes."""
@@ -382,6 +582,9 @@ class FakedAdapterTests(unittest.TestCase):
adapter1_out_props.update({
'object-id': adapter1.oid,
'object-uri': adapter1.uri,
+ 'status': 'active',
+ 'adapter-family': 'roce',
+ 'network-port-uris': [],
})
self.assertIsInstance(adapter1, FakedAdapter)
self.assertEqual(adapter1.properties, adapter1_out_props)
@@ -416,6 +619,8 @@ class FakedAdapterTests(unittest.TestCase):
adapter2_out_props.update({
'object-id': adapter2.oid,
'object-uri': adapter2.uri,
+ 'status': 'active',
+ 'storage-port-uris': [],
})
self.assertIsInstance(adapter2, FakedAdapter)
self.assertEqual(adapter2.properties, adapter2_out_props)
@@ -468,6 +673,9 @@ class FakedCpcTests(unittest.TestCase):
cpc1_out_props.update({
'object-id': cpc1.oid,
'object-uri': cpc1.uri,
+ 'dpm-enabled': False,
+ 'is-ensemble-member': False,
+ 'status': 'operating',
})
self.assertIsInstance(cpc1, FakedCpc)
self.assertEqual(cpc1.properties, cpc1_out_props)
@@ -508,6 +716,9 @@ class FakedCpcTests(unittest.TestCase):
cpc2_out_props.update({
'object-id': cpc2.oid,
'object-uri': cpc2.uri,
+ 'dpm-enabled': False,
+ 'is-ensemble-member': False,
+ 'status': 'operating',
})
self.assertIsInstance(cpc2, FakedCpc)
self.assertEqual(cpc2.properties, cpc2_out_props)
@@ -599,6 +810,8 @@ class FakedHbaTests(unittest.TestCase):
hba1_out_props.update({
'element-id': hba1.oid,
'element-uri': hba1.uri,
+ 'device-number': hba1.properties['device-number'],
+ 'wwpn': hba1.properties['wwpn'],
})
self.assertIsInstance(hba1, FakedHba)
self.assertEqual(hba1.properties, hba1_out_props)
@@ -617,6 +830,8 @@ class FakedHbaTests(unittest.TestCase):
'element-id': '2',
'name': 'hba2',
'adapter-port-uri': '/api/adapters/1/storage-ports/1',
+ 'device-number': '8001',
+ 'wwpn': 'AFFEAFFE00008001',
}
# the function to be tested:
@@ -703,6 +918,7 @@ class FakedLparTests(unittest.TestCase):
lpar1_out_props.update({
'object-id': lpar1.oid,
'object-uri': lpar1.uri,
+ 'status': 'not-activated',
})
self.assertIsInstance(lpar1, FakedLpar)
self.assertEqual(lpar1.properties, lpar1_out_props)
@@ -734,6 +950,7 @@ class FakedLparTests(unittest.TestCase):
lpar2_out_props.update({
'object-id': lpar2.oid,
'object-uri': lpar2.uri,
+ 'status': 'not-activated',
})
self.assertIsInstance(lpar2, FakedLpar)
self.assertEqual(lpar2.properties, lpar2_out_props)
@@ -827,6 +1044,7 @@ class FakedNicTests(unittest.TestCase):
nic1_out_props.update({
'element-id': nic1.oid,
'element-uri': nic1.uri,
+ 'device-number': nic1.properties['device-number'],
})
self.assertIsInstance(nic1, FakedNic)
self.assertEqual(nic1.properties, nic1_out_props)
@@ -863,6 +1081,7 @@ class FakedNicTests(unittest.TestCase):
nic2_out_props.update({
'element-id': nic2.oid,
'element-uri': nic2.uri,
+ 'device-number': nic2.properties['device-number'],
})
self.assertIsInstance(nic2, FakedNic)
self.assertEqual(nic2.properties, nic2_out_props)
@@ -929,6 +1148,10 @@ class FakedPartitionTests(unittest.TestCase):
partition1_out_props.update({
'object-id': partition1.oid,
'object-uri': partition1.uri,
+ 'status': 'stopped',
+ 'hba-uris': [],
+ 'nic-uris': [],
+ 'virtual-function-uris': [],
})
self.assertIsInstance(partition1, FakedPartition)
self.assertEqual(partition1.properties, partition1_out_props)
@@ -961,6 +1184,10 @@ class FakedPartitionTests(unittest.TestCase):
partition2_out_props.update({
'object-id': partition2.oid,
'object-uri': partition2.uri,
+ 'status': 'stopped',
+ 'hba-uris': [],
+ 'nic-uris': [],
+ 'virtual-function-uris': [],
})
self.assertIsInstance(partition2, FakedPartition)
self.assertEqual(partition2.properties, partition2_out_props)
@@ -1148,6 +1375,7 @@ class FakedVirtualFunctionTests(unittest.TestCase):
virtual_function1_out_props.update({
'element-id': virtual_function1.oid,
'element-uri': virtual_function1.uri,
+ 'device-number': virtual_function1.properties['device-number'],
})
self.assertIsInstance(virtual_function1, FakedVirtualFunction)
self.assertEqual(virtual_function1.properties,
@@ -1186,6 +1414,7 @@ class FakedVirtualFunctionTests(unittest.TestCase):
virtual_function2_out_props.update({
'element-id': virtual_function2.oid,
'element-uri': virtual_function2.uri,
+ 'device-number': virtual_function2.properties['device-number'],
})
self.assertIsInstance(virtual_function2, FakedVirtualFunction)
self.assertEqual(virtual_function2.properties,
diff --git a/tests/unit/zhmcclient_mock/test_urihandler.py b/tests/unit/zhmcclient_mock/test_urihandler.py
index 6febc10..9d3ba3f 100755
--- a/tests/unit/zhmcclient_mock/test_urihandler.py
+++ b/tests/unit/zhmcclient_mock/test_urihandler.py
@@ -576,6 +576,7 @@ class GenericGetPropertiesHandlerTests(unittest.TestCase):
'object-uri': '/api/cpcs/1',
'name': 'cpc_1',
'dpm-enabled': False,
+ 'is-ensemble-member': False,
'description': 'CPC #1 (classic mode)',
'status': 'operating',
}
@@ -678,6 +679,7 @@ class CpcHandlersTests(unittest.TestCase):
'object-uri': '/api/cpcs/1',
'name': 'cpc_1',
'dpm-enabled': False,
+ 'is-ensemble-member': False,
'description': 'CPC #1 (classic mode)',
'status': 'operating',
}
@@ -822,12 +824,12 @@ class AdapterHandlersTests(unittest.TestCase):
{
'object-uri': '/api/adapters/2',
'name': 'fcp_2',
- # status not set in resource -> not in response
+ 'status': 'active',
},
{
'object-uri': '/api/adapters/3',
'name': 'roce_3',
- # status not set in resource -> not in response
+ 'status': 'active',
},
]
}
@@ -1004,6 +1006,7 @@ class PartitionHandlersTests(unittest.TestCase):
'object-id': '2',
'object-uri': '/api/partitions/2',
'name': 'partition_2',
+ 'status': 'stopped',
'hba-uris': [],
'nic-uris': [],
'virtual-function-uris': [],
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 3
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-mock",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
click-repl==0.3.0
click-spinner==0.1.10
coverage==7.8.0
decorator==5.2.1
docopt==0.6.2
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
mock==5.2.0
packaging==24.2
pbr==6.1.1
pluggy==1.5.0
progressbar2==4.5.0
prompt_toolkit==3.0.50
pytest==8.3.5
pytest-cov==6.0.0
pytest-mock==3.14.0
python-utils==3.9.1
requests==2.32.3
six==1.17.0
stomp.py==8.2.0
tabulate==0.9.0
tomli==2.2.1
typing_extensions==4.13.0
urllib3==2.3.0
wcwidth==0.2.13
websocket-client==1.8.0
-e git+https://github.com/zhmcclient/python-zhmcclient.git@560cdaa7fda0f63688efa192cbfe711324a194ea#egg=zhmcclient
| name: python-zhmcclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- click==8.1.8
- click-repl==0.3.0
- click-spinner==0.1.10
- coverage==7.8.0
- decorator==5.2.1
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pbr==6.1.1
- pluggy==1.5.0
- progressbar2==4.5.0
- prompt-toolkit==3.0.50
- pytest==8.3.5
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- python-utils==3.9.1
- requests==2.32.3
- six==1.17.0
- stomp-py==8.2.0
- tabulate==0.9.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==2.3.0
- wcwidth==0.2.13
- websocket-client==1.8.0
prefix: /opt/conda/envs/python-zhmcclient
| [
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHmcTests::test_hmc_1_cpc",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHmcTests::test_hmc_2_cpcs",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHmcTests::test_repr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHmcTests::test_res_dict",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedBaseTests::test_manager_repr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedBaseTests::test_resource_repr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedAdapterTests::test_adapters_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedAdapterTests::test_adapters_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedCpcTests::test_cpcs_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedCpcTests::test_cpcs_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedLparTests::test_lpars_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedLparTests::test_lpars_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPartitionTests::test_partitions_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPartitionTests::test_partitions_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::GenericGetPropertiesHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::AdapterHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_create_verify"
]
| []
| [
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHmcTests::test_hmc",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedBaseTests::test_manager_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedBaseTests::test_resource_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedActivationProfileTests::test_profiles_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedActivationProfileTests::test_profiles_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedActivationProfileTests::test_profiles_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedActivationProfileTests::test_profiles_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedAdapterTests::test_adapters_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedAdapterTests::test_adapters_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedCpcTests::test_cpcs_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedCpcTests::test_cpcs_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHbaTests::test_hbas_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHbaTests::test_hbas_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHbaTests::test_hbas_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedHbaTests::test_hbas_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedLparTests::test_lpars_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedLparTests::test_lpars_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedNicTests::test_nics_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedNicTests::test_nics_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedNicTests::test_nics_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedNicTests::test_nics_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPartitionTests::test_partitions_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPartitionTests::test_partitions_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPortTests::test_ports_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPortTests::test_ports_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPortTests::test_ports_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedPortTests::test_ports_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualFunctionTests::test_virtual_functions_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualFunctionTests::test_virtual_functions_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualFunctionTests::test_virtual_functions_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualFunctionTests::test_virtual_functions_remove",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualSwitchTests::test_virtual_switches_add",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualSwitchTests::test_virtual_switches_attr",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualSwitchTests::test_virtual_switches_list",
"tests/unit/zhmcclient_mock/test_hmc.py::FakedVirtualSwitchTests::test_virtual_switches_remove",
"tests/unit/zhmcclient_mock/test_urihandler.py::HTTPErrorTests::test_attributes",
"tests/unit/zhmcclient_mock/test_urihandler.py::HTTPErrorTests::test_response",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidResourceErrorTests::test_attributes_no_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidResourceErrorTests::test_attributes_with_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidMethodErrorTests::test_attributes_no_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::InvalidMethodErrorTests::test_attributes_with_handler",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcNotInDpmErrorTests::test_attributes",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcInDpmErrorTests::test_attributes",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerEmptyTests::test_uris_empty_1",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerEmptyTests::test_uris_empty_2",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_begin_extra",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_begin_missing",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end2_extra",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end2_missing",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end2_slash",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end_extra",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end_missing",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_err_end_slash",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_ok1",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_ok2",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerHandlerSimpleTests::test_ok3",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_delete_cpc2",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_get_cpc1",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_get_cpcs",
"tests/unit/zhmcclient_mock/test_urihandler.py::UriHandlerMethodTests::test_post_cpcs",
"tests/unit/zhmcclient_mock/test_urihandler.py::GenericUpdatePropertiesHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VersionHandlerTests::test_get_version",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcStartStopHandlerTests::test_start_classic",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcStartStopHandlerTests::test_stop_classic",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcStartStopHandlerTests::test_stop_start_dpm",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcExportPortNamesListHandlerTests::test_invoke_err_no_input",
"tests/unit/zhmcclient_mock/test_urihandler.py::CpcExportPortNamesListHandlerTests::test_invoke_ok",
"tests/unit/zhmcclient_mock/test_urihandler.py::AdapterHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::AdapterHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NetworkPortHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::NetworkPortHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::StoragePortHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::StoragePortHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionHandlersTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::PartitionStartStopHandlerTests::test_start_stop",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_create_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::HbaHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_create_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::NicHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_create_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_delete_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualFunctionHandlerTests::test_update_verify",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualSwitchHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::VirtualSwitchHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::LparHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::LparHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::LparActLoadDeactHandlerTests::test_start_stop",
"tests/unit/zhmcclient_mock/test_urihandler.py::ResetActProfileHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::ResetActProfileHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::ImageActProfileHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::ImageActProfileHandlersTests::test_list",
"tests/unit/zhmcclient_mock/test_urihandler.py::LoadActProfileHandlersTests::test_get",
"tests/unit/zhmcclient_mock/test_urihandler.py::LoadActProfileHandlersTests::test_list"
]
| []
| Apache License 2.0 | 1,102 | [
"docs/changes.rst",
"zhmcclient_mock/_session.py",
"zhmcclient_mock/_hmc.py"
]
| [
"docs/changes.rst",
"zhmcclient_mock/_session.py",
"zhmcclient_mock/_hmc.py"
]
|
cdent__gabbi-210 | 867042bc99a844d37e3eb5e7dd748dbb0734ac69 | 2017-03-20 19:06:23 | 867042bc99a844d37e3eb5e7dd748dbb0734ac69 | diff --git a/gabbi/handlers/base.py b/gabbi/handlers/base.py
index 03c37e3..055aff8 100644
--- a/gabbi/handlers/base.py
+++ b/gabbi/handlers/base.py
@@ -13,6 +13,9 @@
"""Base classes for response and content handlers."""
+from gabbi.exception import GabbiFormatError
+
+
class ResponseHandler(object):
"""Add functionality for making assertions about an HTTP response.
@@ -38,6 +41,11 @@ class ResponseHandler(object):
def __call__(self, test):
if test.test_data[self._key]:
self.preprocess(test)
+ if type(self.test_key_value) != type(test.test_data[self._key]):
+ raise GabbiFormatError(
+ "%s in '%s' has incorrect type, must be %s"
+ % (self._key, test.test_data['name'],
+ type(self.test_key_value)))
for item in test.test_data[self._key]:
try:
value = test.test_data[self._key][item]
| response_strings can lead to false positives if the value is a string
The correct way to do the `response_strings` test is to provide a list of strings:
```yaml
response_strings:
- foo
- bar
```
If you forget and provide a string instead, the test can sometimes pass anyway, because the value is used as an iterable and strings iterate character by character. In the following example, if the response contains the letters `f` and `o`, the test will pass:
```yaml
response_strings:
foo
```
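A minimal sketch of why the bare string can slip through, assuming a response body of `omega\n` (both values here are illustrative, not from the original report):
```python
output = "omega\n"   # assumed response body
expected = "foo"     # mis-specified: a str where a list was intended

# Iterating a str yields single characters, so each check degenerates
# into a one-character substring test: 'o' is found, 'f' is not.
for item in expected:
    print(repr(item), item in output)

# The guard added in the patch compares the handler's declared default
# type (a list for response_strings) with what the YAML actually supplied:
print(type(expected) == type([]))   # False -> the test file is malformed
```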
Stronger type checking is required. | cdent/gabbi | diff --git a/gabbi/tests/test_handlers.py b/gabbi/tests/test_handlers.py
index df655b4..3caa086 100644
--- a/gabbi/tests/test_handlers.py
+++ b/gabbi/tests/test_handlers.py
@@ -17,6 +17,7 @@ import json
import unittest
from gabbi import case
+from gabbi.exception import GabbiFormatError
from gabbi.handlers import core
from gabbi.handlers import jsonhandler
from gabbi import suitemaker
@@ -104,6 +105,19 @@ class HandlersTest(unittest.TestCase):
# Check the pprint of the json
self.assertIn(' "location": "house"', msg)
+ def test_response_string_list_type(self):
+ handler = core.StringResponseHandler()
+ self.test.test_data = {
+ 'name': 'omega test',
+ 'response_strings': 'omega'
+ }
+ self.test.output = 'omega\n'
+ with self.assertRaises(GabbiFormatError) as exc:
+ self._assert_handler(handler)
+ self.assertIn('has incorrect type', str(exc))
+ self.assertIn("response_strings in 'omega test'",
+ str(exc))
+
def test_response_json_paths(self):
handler = jsonhandler.JSONHandler()
self.test.content_type = "application/json"
@@ -178,6 +192,19 @@ class HandlersTest(unittest.TestCase):
}
self._assert_handler(handler)
+ def test_response_json_paths_dict_type(self):
+ handler = jsonhandler.JSONHandler()
+ self.test.test_data = {
+ 'name': 'omega test',
+ 'response_json_paths': ['alpha', 'beta']
+ }
+ self.test.output = 'omega\n'
+ with self.assertRaises(GabbiFormatError) as exc:
+ self._assert_handler(handler)
+ self.assertIn('has incorrect type', str(exc))
+ self.assertIn("response_json_paths in 'omega test'",
+ str(exc))
+
def test_response_headers(self):
handler = core.HeadersResponseHandler()
self.test.response = {'content-type': 'text/plain'}
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 1.32 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
colorama==0.4.5
decorator==5.1.1
fixtures==4.0.1
-e git+https://github.com/cdent/gabbi.git@867042bc99a844d37e3eb5e7dd748dbb0734ac69#egg=gabbi
importlib-metadata==4.8.3
iniconfig==1.1.1
jsonpath-rw==1.4.0
jsonpath-rw-ext==1.2.2
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
ply==3.11
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
PyYAML==6.0.1
six==1.17.0
testtools==2.6.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
wsgi_intercept==1.13.1
zipp==3.6.0
| name: gabbi
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- colorama==0.4.5
- decorator==5.1.1
- fixtures==4.0.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jsonpath-rw==1.4.0
- jsonpath-rw-ext==1.2.2
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- ply==3.11
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pyyaml==6.0.1
- six==1.17.0
- testtools==2.6.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- wsgi-intercept==1.13.1
- zipp==3.6.0
prefix: /opt/conda/envs/gabbi
| [
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_dict_type",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_string_list_type"
]
| []
| [
"gabbi/tests/test_handlers.py::HandlersTest::test_empty_response_handler",
"gabbi/tests/test_handlers.py::HandlersTest::test_resonse_headers_stringify",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_data",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_fail_header",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_headers_regex",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_data",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_fail_path",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_regex",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_json_paths_regex_number",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_output",
"gabbi/tests/test_handlers.py::HandlersTest::test_response_strings_fail_big_payload"
]
| []
| Apache License 2.0 | 1,103 | [
"gabbi/handlers/base.py"
]
| [
"gabbi/handlers/base.py"
]
|
|
Backblaze__B2_Command_Line_Tool-332 | 26ba7c389b732b2202da62a28826a893a8d47749 | 2017-03-20 21:04:08 | 4e3ee3d1d9bdaf7bbd164dfaa812868aa92c2d32 | diff --git a/b2/sync/policy.py b/b2/sync/policy.py
index b8e7435..5b74f97 100644
--- a/b2/sync/policy.py
+++ b/b2/sync/policy.py
@@ -240,7 +240,6 @@ def make_b2_keep_days_actions(
only the 25-day old version can be deleted. The 15 day-old version
was visible 10 days ago.
"""
- prev_age_days = None
deleting = False
if dest_file is None:
# B2 does not really store folders, so there is no need to hide
@@ -250,8 +249,17 @@ def make_b2_keep_days_actions(
# How old is this version?
age_days = (now_millis - version.mod_time) / ONE_DAY_IN_MS
- # We assume that the versions are ordered by time, newest first.
- assert prev_age_days is None or prev_age_days <= age_days
+ # Mostly, the versions are ordered by time, newest first,
+ # BUT NOT ALWAYS. The mod time we have is the src_last_modified_millis
+ # from the file info (if present), or the upload start time
+ # (if not present). The user-specified src_last_modified_millis
+ # may not be in order. Because of that, we no longer
+ # assert that age_days is non-decreasing.
+ #
+ # Note that if there is an out-of-order date that is old enough
+ # to trigger deletions, all of the versions uploaded before that
+ # (the ones after it in the list) will be deleted, even if they
+ # aren't over the age threshold.
# Do we need to hide this version?
if version_index == 0 and source_file is None and version.action == 'upload':
@@ -275,6 +283,3 @@ def make_b2_keep_days_actions(
# age of this one?
if keep_days < age_days:
deleting = True
-
- # Remember this age for next time around the loop.
- prev_age_days = age_days
| CLI Sync errors
Hi all,
after I finished my first sync to the cloud after 3 weeks, I now get errors while syncing new files to the cloud.
The following lines appear a few seconds after I start my CLI command:
```
C:\Program Files\Python36\Scripts>b2.exe sync --excludeRegex DfsrPrivate --threads 10 --keepDays 30 --replaceNewer \\?\D:\DFS\Daten b2://Nuernberg01/Daten
ERROR:b2.console_tool:ConsoleTool unexpected exception
Traceback (most recent call last):
File "c:\program files\python36\lib\site-packages\b2\console_tool.py", line 992, in run_command
return command.run(args)
File "c:\program files\python36\lib\site-packages\b2\console_tool.py", line 781, in run
dry_run=args.dryRun,
File "c:\program files\python36\lib\site-packages\logfury\v0_1\trace_call.py", line 84, in wrapper
return function(*wrapee_args, **wrapee_kwargs)
File "c:\program files\python36\lib\site-packages\b2\sync\sync.py", line 251, in sync_folders
source_folder, dest_folder, args, now_millis, reporter
File "c:\program files\python36\lib\site-packages\b2\sync\sync.py", line 150, in make_folder_sync_actions
sync_type, source_file, dest_file, source_folder, dest_folder, args, now_millis
File "c:\program files\python36\lib\site-packages\b2\sync\sync.py", line 106, in make_file_sync_actions
for action in policy.get_all_actions():
File "c:\program files\python36\lib\site-packages\b2\sync\policy.py", line 104, in get_all_actions
for action in self._get_hide_delete_actions():
File "c:\program files\python36\lib\site-packages\b2\sync\policy.py", line 177, in _get_hide_delete_actions
self._keepDays, self._now_millis
File "c:\program files\python36\lib\site-packages\b2\sync\policy.py", line 254, in make_b2_keep_days_actions
assert prev_age_days is None or prev_age_days <= age_days
AssertionError
Traceback (most recent call last):
File "C:\Program Files\Python36\Scripts\b2-script.py", line 11, in <module>
load_entry_point('b2==0.7.0', 'console_scripts', 'b2')()
File "c:\program files\python36\lib\site-packages\b2\console_tool.py", line 1104, in main
exit_status = ct.run_command(decoded_argv)
File "c:\program files\python36\lib\site-packages\b2\console_tool.py", line 992, in run_command
return command.run(args)
File "c:\program files\python36\lib\site-packages\b2\console_tool.py", line 781, in run
dry_run=args.dryRun,
File "c:\program files\python36\lib\site-packages\logfury\v0_1\trace_call.py", line 84, in wrapper
return function(*wrapee_args, **wrapee_kwargs)
File "c:\program files\python36\lib\site-packages\b2\sync\sync.py", line 251, in sync_folders
source_folder, dest_folder, args, now_millis, reporter
File "c:\program files\python36\lib\site-packages\b2\sync\sync.py", line 150, in make_folder_sync_actions
sync_type, source_file, dest_file, source_folder, dest_folder, args, now_millis
File "c:\program files\python36\lib\site-packages\b2\sync\sync.py", line 106, in make_file_sync_actions
for action in policy.get_all_actions():
File "c:\program files\python36\lib\site-packages\b2\sync\policy.py", line 104, in get_all_actions
for action in self._get_hide_delete_actions():
File "c:\program files\python36\lib\site-packages\b2\sync\policy.py", line 177, in _get_hide_delete_actions
self._keepDays, self._now_millis
File "c:\program files\python36\lib\site-packages\b2\sync\policy.py", line 254, in make_b2_keep_days_actions
assert prev_age_days is None or prev_age_days <= age_days
AssertionError
```
I have no idea what to do.
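The assertion in `make_b2_keep_days_actions` fires because per-version modification times are taken from `src_last_modified_millis` when present, so they are not guaranteed to be newest-first. A minimal sketch with made-up ages (in days) reproducing the failed invariant:
```python
# Ages of successive file versions, in the order B2 lists them (newest first).
# User-supplied src_last_modified_millis can make a later-listed version
# look "newer" than the one before it.
age_days_per_version = [5, 10, 3]   # hypothetical values

prev_age_days = None
try:
    for age_days in age_days_per_version:
        # The invariant the old code asserted; 10 <= 3 is False here.
        assert prev_age_days is None or prev_age_days <= age_days
        prev_age_days = age_days
except AssertionError:
    print("AssertionError: version ages went from %s to %s days"
          % (prev_age_days, age_days))
```
The patch above removes that assertion and documents that an out-of-order old date can pull the versions listed after it into the deletion set.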
| Backblaze/B2_Command_Line_Tool | diff --git a/test/test_policy.py b/test/test_policy.py
new file mode 100644
index 0000000..bcc0ec4
--- /dev/null
+++ b/test/test_policy.py
@@ -0,0 +1,77 @@
+######################################################################
+#
+# File: test_policy
+#
+# Copyright 2017, Backblaze Inc. All Rights Reserved.
+#
+# License https://www.backblaze.com/using_b2_code.html
+#
+######################################################################
+
+from b2.sync.file import File, FileVersion
+from b2.sync.folder import B2Folder
+from b2.sync.policy import make_b2_keep_days_actions
+from .test_base import TestBase
+
+try:
+ from unittest.mock import MagicMock
+except ImportError:
+ from mock import MagicMock
+
+
+class TestMakeB2KeepDaysActions(TestBase):
+ def setUp(self):
+ self.keep_days = 7
+ self.today = 100 * 86400
+ self.one_day_millis = 86400 * 1000
+
+ def test_no_versions(self):
+ self.check_one_answer(True, [], [])
+
+ def test_new_version_no_action(self):
+ self.check_one_answer(True, [(1, -5, 'upload')], [])
+
+ def test_no_source_one_old_version_hides(self):
+ # An upload that is old gets deleted if there is no source file.
+ self.check_one_answer(False, [(1, -10, 'upload')], ['b2_hide(folder/a)'])
+
+ def test_old_hide_causes_delete(self):
+ # A hide marker that is old gets deleted, as do the things after it.
+ self.check_one_answer(
+ True, [(1, -5, 'upload'), (2, -10, 'hide'), (3, -20, 'upload')],
+ ['b2_delete(folder/a, 2, (hide marker))', 'b2_delete(folder/a, 3, (old version))']
+ )
+
+ def test_old_upload_causes_delete(self):
+ # An upload that is old stays if there is a source file, but things
+ # behind it go away.
+ self.check_one_answer(
+ True, [(1, -5, 'upload'), (2, -10, 'upload'), (3, -20, 'upload')],
+ ['b2_delete(folder/a, 3, (old version))']
+ )
+
+ def test_out_of_order_dates(self):
+ # The one at date -3 will get deleted because the one before it is old.
+ self.check_one_answer(
+ True, [(1, -5, 'upload'), (2, -10, 'upload'), (3, -3, 'upload')],
+ ['b2_delete(folder/a, 3, (old version))']
+ )
+
+ def check_one_answer(self, has_source, id_relative_date_action_list, expected_actions):
+ source_file = File('a', []) if has_source else None
+ dest_file_versions = [
+ FileVersion(id_, 'a', self.today + relative_date * self.one_day_millis, action, 100)
+ for (id_, relative_date, action) in id_relative_date_action_list
+ ]
+ dest_file = File('a', dest_file_versions)
+ bucket = MagicMock()
+ api = MagicMock()
+ api.get_bucket_by_name.return_value = bucket
+ dest_folder = B2Folder('bucket-1', 'folder', api)
+ actual_actions = list(
+ make_b2_keep_days_actions(
+ source_file, dest_file, dest_folder, dest_folder, self.keep_days, self.today
+ )
+ )
+ actual_action_strs = [str(a) for a in actual_actions]
+ self.assertEqual(expected_actions, actual_action_strs)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"mock",
"pyflakes",
"yapf",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt",
"requirements-test.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arrow==1.2.3
attrs==22.2.0
-e git+https://github.com/Backblaze/B2_Command_Line_Tool.git@26ba7c389b732b2202da62a28826a893a8d47749#egg=b2
certifi==2021.5.30
charset-normalizer==2.0.12
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
logfury==1.0.1
mock==5.2.0
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyflakes==3.0.1
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
requests==2.27.1
six==1.17.0
tomli==1.2.3
tqdm==4.64.1
typing_extensions==4.1.1
urllib3==1.26.20
yapf==0.32.0
zipp==3.6.0
| name: B2_Command_Line_Tool
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arrow==1.2.3
- attrs==22.2.0
- charset-normalizer==2.0.12
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- logfury==1.0.1
- mock==5.2.0
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyflakes==3.0.1
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- requests==2.27.1
- six==1.17.0
- tomli==1.2.3
- tqdm==4.64.1
- typing-extensions==4.1.1
- urllib3==1.26.20
- yapf==0.32.0
- zipp==3.6.0
prefix: /opt/conda/envs/B2_Command_Line_Tool
| [
"test/test_policy.py::TestMakeB2KeepDaysActions::test_out_of_order_dates"
]
| []
| [
"test/test_policy.py::TestMakeB2KeepDaysActions::test_new_version_no_action",
"test/test_policy.py::TestMakeB2KeepDaysActions::test_no_source_one_old_version_hides",
"test/test_policy.py::TestMakeB2KeepDaysActions::test_no_versions",
"test/test_policy.py::TestMakeB2KeepDaysActions::test_old_hide_causes_delete",
"test/test_policy.py::TestMakeB2KeepDaysActions::test_old_upload_causes_delete"
]
| []
| MIT License | 1,104 | [
"b2/sync/policy.py"
]
| [
"b2/sync/policy.py"
]
|
|
streamlink__streamlink-724 | 951edb3ef8127598ec518e5ebab7dedf5e00f68c | 2017-03-21 09:08:18 | 4b09107829230265fbb5e54a28fccbf1332d0cc4 | diff --git a/src/streamlink/plugins/bigo.py b/src/streamlink/plugins/bigo.py
index 98101c78..365d38cd 100644
--- a/src/streamlink/plugins/bigo.py
+++ b/src/streamlink/plugins/bigo.py
@@ -47,7 +47,7 @@ class BigoStream(Stream):
class Bigo(Plugin):
- _url_re = re.compile(r"https?://(live.bigo.tv/\d+|bigoweb.co/show/\d+)")
+ _url_re = re.compile(r"https?://(?:www\.)?(bigo\.tv/\d+|bigoweb\.co/show/\d+)")
_flashvars_re = flashvars = re.compile(
r'''^\s*(?<!<!--)<param.*value="tmp=(\d+)&channel=(\d+)&srv=(\d+\.\d+\.\d+\.\d+)&port=(\d+)"''',
re.M)
| bigoweb.co / bigolive.tv plugin no longer works
----
### Checklist
- [x] This is a bug report.
- [ ] This is a plugin request.
- [ ] This is a feature request.
- [ ] I used the search function to find already opened/closed issues or pull requests.
### Description
error: No plugin can handle bigoweb.co addresses as of 10 minutes ago
### Expected / Actual behavior
10 minutes ago it worked fine and now it gives an error.
...
### Reproduction steps / Stream URLs to test
1. ...http://www.bigoweb.co/ (any stream)
2. ...
3. ...
### Environment details (operating system, python version, etc.)
...
### Comments, logs, screenshots, etc.
...
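For reference, a small standalone sketch of how the updated pattern from the patch above treats the new and old URL forms (the channel IDs are made up):
```python
import re

# Pattern taken from the patch; can_handle_url in the plugin matches against it.
_url_re = re.compile(r"https?://(?:www\.)?(bigo\.tv/\d+|bigoweb\.co/show/\d+)")

for url in (
    "http://www.bigoweb.co/show/12345",   # new web player URL -> matches
    "https://bigo.tv/12345",              # bare bigo.tv URL -> matches
    "http://live.bigo.tv/12345",          # old live.bigo.tv URL -> no match
):
    print(url, bool(_url_re.match(url)))
```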
| streamlink/streamlink | diff --git a/tests/test_plugin_bigo.py b/tests/test_plugin_bigo.py
new file mode 100644
index 00000000..99bd4e17
--- /dev/null
+++ b/tests/test_plugin_bigo.py
@@ -0,0 +1,31 @@
+import unittest
+
+from streamlink.plugins.bigo import Bigo
+
+
+class TestPluginBongacams(unittest.TestCase):
+ def test_can_handle_url(self):
+ # Correct urls
+ self.assertTrue(Bigo.can_handle_url("http://www.bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://www.bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("http://bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://bigoweb.co/show/00000000"))
+ self.assertTrue(Bigo.can_handle_url("http://bigo.tv/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://bigo.tv/00000000"))
+ self.assertTrue(Bigo.can_handle_url("https://www.bigo.tv/00000000"))
+ self.assertTrue(Bigo.can_handle_url("http://www.bigo.tv/00000000"))
+
+ # Old URLs don't work anymore
+ self.assertFalse(Bigo.can_handle_url("http://live.bigo.tv/00000000"))
+ self.assertFalse(Bigo.can_handle_url("https://live.bigo.tv/00000000"))
+
+ # Wrong URL structure
+ self.assertFalse(Bigo.can_handle_url("ftp://www.bigo.tv/00000000"))
+ self.assertFalse(Bigo.can_handle_url("https://www.bigo.tv/show/00000000"))
+ self.assertFalse(Bigo.can_handle_url("http://www.bigo.tv/show/00000000"))
+ self.assertFalse(Bigo.can_handle_url("http://bigo.tv/show/00000000"))
+ self.assertFalse(Bigo.can_handle_url("https://bigo.tv/show/00000000"))
+
+
+if __name__ == "__main__":
+ unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"codecov",
"coverage",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
codecov==2.1.13
coverage==6.2
distlib==0.3.9
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
iso-639==0.4.5
iso3166==2.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycryptodome==3.21.0
pynsist==2.8
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
requests_download==0.1.2
-e git+https://github.com/streamlink/streamlink.git@951edb3ef8127598ec518e5ebab7dedf5e00f68c#egg=streamlink
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
yarg==0.1.10
zipp==3.6.0
| name: streamlink
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- codecov==2.1.13
- coverage==6.2
- distlib==0.3.9
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- iso-639==0.4.5
- iso3166==2.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycryptodome==3.21.0
- pynsist==2.8
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- requests-download==0.1.2
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- yarg==0.1.10
- zipp==3.6.0
prefix: /opt/conda/envs/streamlink
| [
"tests/test_plugin_bigo.py::TestPluginBongacams::test_can_handle_url"
]
| []
| []
| []
| BSD 2-Clause "Simplified" License | 1,105 | [
"src/streamlink/plugins/bigo.py"
]
| [
"src/streamlink/plugins/bigo.py"
]
|
|
amueller__word_cloud-242 | 4fc252d97045fa3616a7f13cbdd56eddca8ff008 | 2017-03-21 15:49:32 | 6b76b8d58df02cfe4a3a7a5ba668a60a478e6190 | diff --git a/wordcloud/wordcloud.py b/wordcloud/wordcloud.py
index ae5d107..e63d922 100644
--- a/wordcloud/wordcloud.py
+++ b/wordcloud/wordcloud.py
@@ -348,7 +348,11 @@ class WordCloud(object):
"""
# make sure frequencies are sorted and normalized
frequencies = sorted(frequencies.items(), key=item1, reverse=True)
+ if len(frequencies) <= 0:
+ raise ValueError("We need at least 1 word to plot a word cloud, "
+ "got %d." % len(frequencies))
frequencies = frequencies[:self.max_words]
+
# largest entry will be 1
max_frequency = float(frequencies[0][1])
@@ -360,10 +364,6 @@ class WordCloud(object):
else:
random_state = Random()
- if len(frequencies) <= 0:
- print("We need at least 1 word to plot a word cloud, got %d."
- % len(frequencies))
-
if self.mask is not None:
mask = self.mask
width = mask.shape[1]
| Checking 'frequencies' list in function WordCloud.generate_from_frequencies()
I'm having an issue related to generating the word cloud from a list of frequencies per word. As you can see below, the function tries to retrieve the max frequency from the 'frequencies' list, but it doesn't first check whether that list contains any elements:
https://github.com/amueller/word_cloud/blob/master/wordcloud/wordcloud.py#L350
That kind of check is done afterwards, in the following snippet (but it's too late, because we have already tried to access the first element of a possibly empty list):
https://github.com/amueller/word_cloud/blob/master/wordcloud/wordcloud.py#L360-L362
In my own version, I've made the following correction to the first snippet in order to avoid crashes (just as an example):
```
try:
    max_frequency = float(frequencies[0][1])
except:
    print "ERROR with list 'frecuencies'=", frequencies
```
Best regards!
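For contrast, the merged fix validates up front instead of catching the error: it raises a ValueError as soon as the sorted frequency list is empty. A usage sketch of the post-fix behaviour, mirroring the new `test_empty_text` in the accompanying test patch:
```python
from wordcloud import WordCloud

wc = WordCloud(stopwords=[])
try:
    wc.generate("")   # no words survive tokenisation
except ValueError as err:
    # "We need at least 1 word to plot a word cloud, got 0."
    print(err)
```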
| amueller/word_cloud | diff --git a/test/test_wordcloud.py b/test/test_wordcloud.py
index ee91f2e..2d2662b 100644
--- a/test/test_wordcloud.py
+++ b/test/test_wordcloud.py
@@ -76,6 +76,16 @@ def test_multiple_s():
assert_in("flosss", wc.words_)
+def test_empty_text():
+ # test originally empty text raises an exception
+ wc = WordCloud(stopwords=[])
+ assert_raises(ValueError, wc.generate, '')
+
+ # test empty-after-filtering text raises an exception
+ wc = WordCloud(stopwords=['a', 'b'])
+ assert_raises(ValueError, wc.generate, 'a b a')
+
+
def test_default():
# test that default word cloud creation and conversions work
wc = WordCloud(max_words=50)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc g++"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
cycler==0.11.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
kiwisolver==1.3.1
matplotlib==3.3.4
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
Pillow==8.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/amueller/word_cloud.git@4fc252d97045fa3616a7f13cbdd56eddca8ff008#egg=wordcloud
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: word_cloud
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- cycler==0.11.0
- kiwisolver==1.3.1
- matplotlib==3.3.4
- mock==5.2.0
- nose==1.3.7
- numpy==1.19.5
- pillow==8.4.0
- python-dateutil==2.9.0.post0
- six==1.17.0
prefix: /opt/conda/envs/word_cloud
| [
"test/test_wordcloud.py::test_empty_text"
]
| []
| [
"test/test_wordcloud.py::test_collocations",
"test/test_wordcloud.py::test_plurals_numbers",
"test/test_wordcloud.py::test_multiple_s",
"test/test_wordcloud.py::test_default",
"test/test_wordcloud.py::test_stopwords_lowercasing",
"test/test_wordcloud.py::test_writing_to_file",
"test/test_wordcloud.py::test_check_errors",
"test/test_wordcloud.py::test_recolor",
"test/test_wordcloud.py::test_random_state",
"test/test_wordcloud.py::test_mask",
"test/test_wordcloud.py::test_single_color_func",
"test/test_wordcloud.py::test_single_color_func_grey",
"test/test_wordcloud.py::test_process_text",
"test/test_wordcloud.py::test_generate_from_frequencies",
"test/test_wordcloud.py::test_relative_scaling_zero"
]
| []
| MIT License | 1,106 | [
"wordcloud/wordcloud.py"
]
| [
"wordcloud/wordcloud.py"
]
|
|
amueller__word_cloud-243 | 2b868941a71e0ad6efac3b25433b97e8776e381b | 2017-03-21 16:29:49 | 6b76b8d58df02cfe4a3a7a5ba668a60a478e6190 | amueller: This PR only adds the flag, but keeps the default as "true", right?
That seems like a good solution to me. The program is more "here is some text, make a word-cloud out of it". And separating two words that always appear together isn't that intuitive to me.
cjmay: Okay, I'll change it: the default behavior in the CLI would be switched in the current PR.
amueller: thanks :) | diff --git a/wordcloud/wordcloud.py b/wordcloud/wordcloud.py
index e63d922..ae5d107 100644
--- a/wordcloud/wordcloud.py
+++ b/wordcloud/wordcloud.py
@@ -348,11 +348,7 @@ class WordCloud(object):
"""
# make sure frequencies are sorted and normalized
frequencies = sorted(frequencies.items(), key=item1, reverse=True)
- if len(frequencies) <= 0:
- raise ValueError("We need at least 1 word to plot a word cloud, "
- "got %d." % len(frequencies))
frequencies = frequencies[:self.max_words]
-
# largest entry will be 1
max_frequency = float(frequencies[0][1])
@@ -364,6 +360,10 @@ class WordCloud(object):
else:
random_state = Random()
+ if len(frequencies) <= 0:
+ print("We need at least 1 word to plot a word cloud, got %d."
+ % len(frequencies))
+
if self.mask is not None:
mask = self.mask
width = mask.shape[1]
diff --git a/wordcloud/wordcloud_cli.py b/wordcloud/wordcloud_cli.py
index 2a54deb..cc105fe 100644
--- a/wordcloud/wordcloud_cli.py
+++ b/wordcloud/wordcloud_cli.py
@@ -16,7 +16,8 @@ def main(args):
wordcloud = wc.WordCloud(stopwords=args.stopwords, mask=args.mask,
width=args.width, height=args.height, font_path=args.font_path,
margin=args.margin, relative_scaling=args.relative_scaling,
- color_func=args.color_func, background_color=args.background_color).generate(args.text)
+ color_func=args.color_func, background_color=args.background_color,
+ collocations=args.collocations).generate(args.text)
image = wordcloud.to_image()
with args.imagefile:
@@ -51,6 +52,8 @@ def parse_args(arguments):
help='use given color as coloring for the image - accepts any value from PIL.ImageColor.getcolor')
parser.add_argument('--background', metavar='color', default='black', type=str, dest='background_color',
help='use given color as background color for the image - accepts any value from PIL.ImageColor.getcolor')
+ parser.add_argument('--no_collocations', action='store_true',
+ help='do not add collocations (bigrams) to word cloud (default: add unigrams and bigrams)')
args = parser.parse_args(arguments)
if args.colormask and args.color:
@@ -74,6 +77,8 @@ def parse_args(arguments):
if args.color:
color_func = wc.get_single_color_func(args.color)
+ args.collocations = not args.no_collocations
+
args.color_func = color_func
return args
| Some text distributions yield duplicate words in image
As a workaround for #226 I tried generating text according to the desired distribution: [food.txt](https://github.com/amueller/word_cloud/files/858596/food.txt)
Note in particular that there are few word types but a relatively high number of tokens per type compared to natural language. Tokens of each word type are also grouped. (My guess would be that one or both of these properties are causing the issue.)
However, `wordcloud_cli.py` (from master) on this generated text file, with default options, yielded a word cloud with each word duplicated conspicuously: 
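The duplication appears to come from collocations: by default the cloud is built from unigrams plus bigrams, and for grouped text like this the bigrams repeat the unigrams. A sketch of the two ways to turn that off with the flag this PR adds — the sample text and file names are illustrative:
```python
from wordcloud import WordCloud

# Tiny stand-in for the attached food.txt: few types, many grouped tokens.
text = "pizza " * 50 + "salad " * 30 + "ramen " * 20

# Default behaviour counts bigrams ("collocations") alongside unigrams.
with_bigrams = WordCloud().generate(text)

# Disabling collocations keeps only the unigram counts.
without_bigrams = WordCloud(collocations=False).generate(text)

# Equivalent CLI route added by this PR (illustrative invocation):
#   wordcloud_cli.py --text food.txt --imagefile food.png --no_collocations
print(sorted(with_bigrams.words_))
print(sorted(without_bigrams.words_))
```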
| amueller/word_cloud | diff --git a/test/test_wordcloud.py b/test/test_wordcloud.py
index 2d2662b..ee91f2e 100644
--- a/test/test_wordcloud.py
+++ b/test/test_wordcloud.py
@@ -76,16 +76,6 @@ def test_multiple_s():
assert_in("flosss", wc.words_)
-def test_empty_text():
- # test originally empty text raises an exception
- wc = WordCloud(stopwords=[])
- assert_raises(ValueError, wc.generate, '')
-
- # test empty-after-filtering text raises an exception
- wc = WordCloud(stopwords=['a', 'b'])
- assert_raises(ValueError, wc.generate, 'a b a')
-
-
def test_default():
# test that default word cloud creation and conversions work
wc = WordCloud(max_words=50)
diff --git a/test/test_wordcloud_cli.py b/test/test_wordcloud_cli.py
index bdfef88..d350d81 100644
--- a/test/test_wordcloud_cli.py
+++ b/test/test_wordcloud_cli.py
@@ -17,7 +17,10 @@ ARGUMENT_SPEC_TYPED = [
ArgOption(cli_name='width', init_name='width', pass_value=13, fail_value=1.),
ArgOption(cli_name='height', init_name='height', pass_value=15, fail_value=1.),
ArgOption(cli_name='margin', init_name='margin', pass_value=17, fail_value=1.),
- ArgOption(cli_name='relative_scaling', init_name='relative_scaling', pass_value=1, fail_value='c')
+ ArgOption(cli_name='relative_scaling', init_name='relative_scaling', pass_value=1, fail_value='c'),
+]
+ARGUMENT_SPEC_UNARY = [
+ ArgOption(cli_name='no_collocations', init_name='collocations', pass_value=True, fail_value=1)
]
ARGUMENT_SPEC_REMAINING = [
ArgOption(cli_name='stopwords', init_name='stopwords', pass_value=temp.name, fail_value=None),
@@ -26,10 +29,12 @@ ARGUMENT_SPEC_REMAINING = [
ArgOption(cli_name='color', init_name='color_func', pass_value='red', fail_value=None),
ArgOption(cli_name='background', init_name='background_color', pass_value='grey', fail_value=None)
]
+ARGUMENT_CLI_NAMES_UNARY = [arg_opt.cli_name for arg_opt in ARGUMENT_SPEC_UNARY]
def all_arguments():
arguments = []
arguments.extend(ARGUMENT_SPEC_TYPED)
+ arguments.extend(ARGUMENT_SPEC_UNARY)
arguments.extend(ARGUMENT_SPEC_REMAINING)
return arguments
@@ -56,6 +61,13 @@ def check_argument(name, result_name, value):
assert_in(result_name, vars(args))
+def check_argument_unary(name, result_name):
+ text = NamedTemporaryFile()
+
+ args = cli.parse_args(['--text', text.name, '--' + name])
+ assert_in(result_name, vars(args))
+
+
def check_argument_type(name, value):
text = NamedTemporaryFile()
@@ -71,7 +83,9 @@ def check_argument_type(name, value):
def test_parse_args_are_passed_along():
for option in all_arguments():
- if option.cli_name != 'mask':
+ if option.cli_name in ARGUMENT_CLI_NAMES_UNARY:
+ yield check_argument_unary, option.cli_name, option.init_name
+ elif option.cli_name != 'mask':
yield check_argument, option.cli_name, option.init_name, option.pass_value
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_media",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc g++"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
cycler==0.11.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
kiwisolver==1.3.1
matplotlib==3.3.4
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
Pillow==8.4.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
python-dateutil==2.9.0.post0
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/amueller/word_cloud.git@2b868941a71e0ad6efac3b25433b97e8776e381b#egg=wordcloud
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: word_cloud
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- cycler==0.11.0
- kiwisolver==1.3.1
- matplotlib==3.3.4
- mock==5.2.0
- nose==1.3.7
- numpy==1.19.5
- pillow==8.4.0
- python-dateutil==2.9.0.post0
- six==1.17.0
prefix: /opt/conda/envs/word_cloud
| [
"test/test_wordcloud_cli.py::test_main_passes_arguments_through"
]
| []
| [
"test/test_wordcloud.py::test_collocations",
"test/test_wordcloud.py::test_plurals_numbers",
"test/test_wordcloud.py::test_multiple_s",
"test/test_wordcloud.py::test_default",
"test/test_wordcloud.py::test_stopwords_lowercasing",
"test/test_wordcloud.py::test_writing_to_file",
"test/test_wordcloud.py::test_check_errors",
"test/test_wordcloud.py::test_recolor",
"test/test_wordcloud.py::test_random_state",
"test/test_wordcloud.py::test_mask",
"test/test_wordcloud.py::test_single_color_func",
"test/test_wordcloud.py::test_single_color_func_grey",
"test/test_wordcloud.py::test_process_text",
"test/test_wordcloud.py::test_generate_from_frequencies",
"test/test_wordcloud.py::test_relative_scaling_zero",
"test/test_wordcloud_cli.py::test_check_duplicate_color_error",
"test/test_wordcloud_cli.py::test_parse_args_defaults_to_random_color",
"test/test_wordcloud_cli.py::test_cli_writes_image"
]
| []
| MIT License | 1,107 | [
"wordcloud/wordcloud.py",
"wordcloud/wordcloud_cli.py"
]
| [
"wordcloud/wordcloud.py",
"wordcloud/wordcloud_cli.py"
]
|
Azure__azure-cli-2575 | 3b6324d00353405e547723056bb324fe3667731c | 2017-03-21 17:17:09 | 350bb060e1fddf437f12788afbeedaa0cf61a87f | codecov-io: # [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2575?src=pr&el=h1) Report
> Merging [#2575](https://codecov.io/gh/Azure/azure-cli/pull/2575?src=pr&el=desc) into [master](https://codecov.io/gh/Azure/azure-cli/commit/3b6324d00353405e547723056bb324fe3667731c?src=pr&el=desc) will **increase** coverage by `<.01%`.
> The diff coverage is `100%`.
```diff
@@ Coverage Diff @@
## master #2575 +/- ##
==========================================
+ Coverage 72.32% 72.32% +<.01%
==========================================
Files 363 363
Lines 19835 19838 +3
Branches 2923 2924 +1
==========================================
+ Hits 14345 14348 +3
Misses 4582 4582
Partials 908 908
```
| [Impacted Files](https://codecov.io/gh/Azure/azure-cli/pull/2575?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/azure-cli-core/azure/cli/core/\_profile.py](https://codecov.io/gh/Azure/azure-cli/compare/3b6324d00353405e547723056bb324fe3667731c...3eb700a6096e68ea881880a9e976c43d060eeed2?src=pr&el=tree#diff-c3JjL2F6dXJlLWNsaS1jb3JlL2F6dXJlL2NsaS9jb3JlL19wcm9maWxlLnB5) | `84.19% <100%> (+0.13%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2575?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2575?src=pr&el=footer). Last update [3b6324d...3eb700a](https://codecov.io/gh/Azure/azure-cli/compare/3b6324d00353405e547723056bb324fe3667731c...3eb700a6096e68ea881880a9e976c43d060eeed2?el=footer&src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). | diff --git a/src/azure-cli-core/azure/cli/core/_profile.py b/src/azure-cli-core/azure/cli/core/_profile.py
index 2312aa240..6c68bddcb 100644
--- a/src/azure-cli-core/azure/cli/core/_profile.py
+++ b/src/azure-cli-core/azure/cli/core/_profile.py
@@ -182,16 +182,22 @@ class Profile(object):
s[_IS_DEFAULT_SUBSCRIPTION] = False
if not new_active_one:
- new_active_one = new_subscriptions[0]
- new_active_one[_IS_DEFAULT_SUBSCRIPTION] = True
- default_sub_id = new_active_one[_SUBSCRIPTION_ID]
+ new_active_one = Profile._pick_working_subscription(new_subscriptions)
else:
- new_subscriptions[0][_IS_DEFAULT_SUBSCRIPTION] = True
- default_sub_id = new_subscriptions[0][_SUBSCRIPTION_ID]
+ new_active_one = Profile._pick_working_subscription(new_subscriptions)
+
+ new_active_one[_IS_DEFAULT_SUBSCRIPTION] = True
+ default_sub_id = new_active_one[_SUBSCRIPTION_ID]
set_cloud_subscription(active_cloud.name, default_sub_id)
self._storage[_SUBSCRIPTIONS] = subscriptions
+ @staticmethod
+ def _pick_working_subscription(subscriptions):
+ from azure.mgmt.resource.subscriptions.models import SubscriptionState
+ s = next((x for x in subscriptions if x['state'] == SubscriptionState.enabled.value), None)
+ return s or subscriptions[0]
+
def set_active_subscription(self, subscription): # take id or name
subscriptions = self.load_cached_subscriptions(all_clouds=True)
active_cloud = get_active_cloud()
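The heart of the change is `_pick_working_subscription`: prefer the first enabled subscription and only fall back to the first entry if none are enabled. A standalone sketch of that selection rule, using dummy records and the literal `'Enabled'` in place of `SubscriptionState.enabled.value`:
```python
# Dummy subscription records; the real ones come from ARM and carry more fields.
subscriptions = [
    {"id": "sub-1", "state": "Disabled"},
    {"id": "sub-2", "state": "Enabled"},
    {"id": "sub-3", "state": "Enabled"},
]

def pick_working_subscription(subscriptions):
    # Same shape as the static method in the patch: first enabled wins,
    # otherwise fall back to the first entry so login still succeeds.
    s = next((x for x in subscriptions if x["state"] == "Enabled"), None)
    return s or subscriptions[0]

print(pick_working_subscription(subscriptions)["id"])   # sub-2
```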
| Login: do not set default on disabled subscriptions | Azure/azure-cli | diff --git a/src/azure-cli-core/tests/test_profile.py b/src/azure-cli-core/tests/test_profile.py
index 08ab8288f..2ce6494ed 100644
--- a/src/azure-cli-core/tests/test_profile.py
+++ b/src/azure-cli-core/tests/test_profile.py
@@ -165,6 +165,19 @@ class Test_Profile(unittest.TestCase): # pylint: disable=too-many-public-method
self.assertFalse(storage_mock['subscriptions'][1]['isDefault'])
self.assertTrue(storage_mock['subscriptions'][0]['isDefault'])
+ def test_default_active_subscription_to_non_disabled_one(self):
+ storage_mock = {'subscriptions': None}
+ profile = Profile(storage_mock)
+
+ subscriptions = profile._normalize_properties(
+ self.user2, [self.subscription2, self.subscription1], False)
+
+ profile._set_subscriptions(subscriptions)
+
+ # verify we skip the overdued subscription and default to the 2nd one in the list
+ self.assertEqual(storage_mock['subscriptions'][1]['name'], self.subscription1.display_name)
+ self.assertTrue(storage_mock['subscriptions'][1]['isDefault'])
+
def test_get_subscription(self):
storage_mock = {'subscriptions': None}
profile = Profile(storage_mock)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==2.0.0
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_datalake&subdirectory=src/command_modules/azure-cli-datalake
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_documentdb&subdirectory=src/command_modules/azure-cli-documentdb
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@3b6324d00353405e547723056bb324fe3667731c#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.4
azure-core==1.24.2
azure-datalake-store==0.0.5
azure-graphrbac==0.30.0rc6
azure-keyvault==0.1.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==3.0.0
azure-mgmt-compute==0.33.1rc1
azure-mgmt-containerregistry==0.2.0
azure-mgmt-datalake-analytics==0.1.3
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.3
azure-mgmt-dns==1.0.0
azure-mgmt-documentdb==0.1.0
azure-mgmt-iothub==0.2.1
azure-mgmt-keyvault==0.30.0
azure-mgmt-network==0.30.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.2
azure-mgmt-sql==0.3.2
azure-mgmt-storage==0.31.0
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.31.0
azure-nspkg==3.0.2
azure-storage==0.33.0
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==2.0.0
- azure-common==1.1.4
- azure-core==1.24.2
- azure-datalake-store==0.0.5
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.1.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==3.0.0
- azure-mgmt-compute==0.33.1rc1
- azure-mgmt-containerregistry==0.2.0
- azure-mgmt-datalake-analytics==0.1.3
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.3
- azure-mgmt-dns==1.0.0
- azure-mgmt-documentdb==0.1.0
- azure-mgmt-iothub==0.2.1
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-network==0.30.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.2
- azure-mgmt-sql==0.3.2
- azure-mgmt-storage==0.31.0
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.31.0
- azure-nspkg==3.0.2
- azure-storage==0.33.0
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_default_active_subscription_to_non_disabled_one"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_using_cert",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_service_principal_auth_client_cert"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_create_token_cache",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_add_new_sp_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds_with_cert",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_load_tokens_and_sp_creds_with_secret",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_new_token_added_by_adal",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_credscache_remove_creds",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_particular_tenent",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_from_service_principal_id",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_interactive_from_particular_tenent",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_through_interactive_flow",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_find_subscriptions_thru_username_password",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_current_account_user",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_expanded_subscription_info",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_login_credentials",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_login_credentials_for_graph_client",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_subscription",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_load_cached_tokens",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_logout",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_logout_all",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_normalize",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_service_principal_auth_client_secret",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_set_active_subscription",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_update_add_two_different_subscriptions",
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_update_with_same_subscription_added_twice"
]
| [
"src/azure-cli-core/tests/test_profile.py::Test_Profile::test_get_expanded_subscription_info_for_logged_in_service_principal"
]
| MIT License | 1,108 | [
"src/azure-cli-core/azure/cli/core/_profile.py"
]
| [
"src/azure-cli-core/azure/cli/core/_profile.py"
]
|
CheetahTemplate3__cheetah3-2 | 01eaf9e0d4e319d465443d50ad850b849d53b760 | 2017-03-21 19:13:17 | 01eaf9e0d4e319d465443d50ad850b849d53b760 | diff --git a/Cheetah/Compiler.py b/Cheetah/Compiler.py
index f1b392d..5629272 100644
--- a/Cheetah/Compiler.py
+++ b/Cheetah/Compiler.py
@@ -1744,11 +1744,12 @@ class ModuleCompiler(SettingsManager, GenUtils):
# - We also assume that the final . separates the classname from the
# module name. This might break if people do something really fancy
# with their dots and namespaces.
- baseclasses = baseClassName.split(',')
- for klass in baseclasses:
+ baseclasses = []
+ for klass in baseClassName.split(','):
+ klass = klass.strip()
chunks = klass.split('.')
if len(chunks)==1:
- self._getActiveClassCompiler().setBaseClass(klass)
+ baseclasses.append(klass)
if klass not in self.importedVarNames():
modName = klass
# we assume the class name to be the module name
@@ -1763,7 +1764,7 @@ class ModuleCompiler(SettingsManager, GenUtils):
if modName in self.importedVarNames():
needToAddImport = False
finalBaseClassName = klass.replace(modName+'.', '')
- self._getActiveClassCompiler().setBaseClass(finalBaseClassName)
+ baseclasses.append(finalBaseClassName)
break
else:
modName += '.'+chunk
@@ -1773,11 +1774,13 @@ class ModuleCompiler(SettingsManager, GenUtils):
if finalClassName != chunks[-2]:
# we assume the class name to be the module name
modName = '.'.join(chunks)
- self._getActiveClassCompiler().setBaseClass(finalClassName)
+ baseclasses.append(finalClassName)
importStatement = "from %s import %s" % (modName, finalClassName)
self.addImportStatement(importStatement)
self.addImportedVarNames( [finalClassName,] )
-
+
+ self._getActiveClassCompiler().setBaseClass(', '.join(baseclasses))
+
def setCompilerSetting(self, key, valueExpr):
self.setSetting(key, eval(valueExpr) )
self._parser.configureParser()
| Failing to catch an Exception
Hello,
The following `except` line is expected to be reached, as the file `expect_exception_catch` doesn't exist. Instead, an `IndentationError` is raised, which was not happening at some point; even back then, the `ImportError` was not being caught either. I think this whole area has been broken since version 2, so it's a legacy issue.
```
#try
#from lib.expect_exception_catch import as_should_be_ImportError
#except ImportError
#pass
#end try
```
As a test, the above code is at the top of the template file, and the error is...
```
except ImportError: # generated from line 3, col 1
^
IndentationError: expected an indented block
```
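For reference, the plain-Python behaviour the template block above is asking for (using the reporter's own, non-existent module path) is simply:

```python
# Hand-written equivalent of the #try / #except ImportError / #pass block: the import
# fails because the module does not exist, the ImportError is caught, execution continues.
try:
    from lib.expect_exception_catch import as_should_be_ImportError
except ImportError:
    pass
```

The "expected an indented block" caret at the generated `except` line suggests the compiled `try:` body came out empty (for example, the import being hoisted to module scope), but that is only a guess from the error position, not a verified reading of the compiler output.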
Thanks for picking up Cheetah
p.s. please keep pypi release build binaries in sync with the git master branch for the best feedback loop | CheetahTemplate3/cheetah3 | diff --git a/Cheetah/Tests/Boinker.py b/Cheetah/Tests/Boinker.py
new file mode 100644
index 0000000..5f99bc5
--- /dev/null
+++ b/Cheetah/Tests/Boinker.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+from Cheetah.Template import Template
+
+
+class Boinker(Template):
+ def boink(self):
+ return [1, 2, 3]
diff --git a/Cheetah/Tests/Pinger.py b/Cheetah/Tests/Pinger.py
new file mode 100644
index 0000000..6b8a488
--- /dev/null
+++ b/Cheetah/Tests/Pinger.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+from Cheetah.Template import Template
+
+
+class Pinger(Template):
+ def ping(self):
+ return 'pong'
diff --git a/Cheetah/Tests/Template.py b/Cheetah/Tests/Template.py
index 87d1854..f21949a 100755
--- a/Cheetah/Tests/Template.py
+++ b/Cheetah/Tests/Template.py
@@ -316,25 +316,22 @@ class StaticMethodSupport(TemplateTest):
except AttributeError as ex:
self.fail(ex)
-class Useless(object):
- def boink(self):
- return [1, 2, 3]
class MultipleInheritanceSupport(TemplateTest):
def runTest(self):
template = '''
- #extends Template, Useless
+ #extends Cheetah.Tests.Boinker, Cheetah.Tests.Pinger
#def foo()
#return [4,5] + $boink()
#end def
'''
- template = Template.compile(template,
- moduleGlobals={'Useless' : Useless},
- compilerSettings={'autoImportForExtendsDirective' : False})
+
+ template = Template.compile(template)
template = template()
result = template.foo()
assert result == [4, 5, 1, 2, 3], (result, 'Unexpected result')
+
class SubclassSearchListTest(TemplateTest):
'''
Verify that if we subclass Template, we can still
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 2.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"Markdown>=2.0.1",
"pygments",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/CheetahTemplate3/cheetah3.git@01eaf9e0d4e319d465443d50ad850b849d53b760#egg=CT3
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
importlib_metadata==8.6.1
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Markdown==3.7
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
Pygments==2.19.1
pytest @ file:///croot/pytest_1738938843180/work
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
zipp==3.21.0
| name: cheetah3
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- importlib-metadata==8.6.1
- markdown==3.7
- pygments==2.19.1
- zipp==3.21.0
prefix: /opt/conda/envs/cheetah3
| [
"Cheetah/Tests/Template.py::MultipleInheritanceSupport::runTest"
]
| []
| [
"Cheetah/Tests/Template.py::ClassMethods_compile::test_baseclassArg",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_basicUsage",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_classNameArg",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_compilationCache",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_keepRefToGeneratedCodeArg",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_mainMethodNameArg",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_moduleFileCaching",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_moduleGlobalsArg",
"Cheetah/Tests/Template.py::ClassMethods_compile::test_moduleNameArg",
"Cheetah/Tests/Template.py::ClassMethods_subclass::test_basicUsage",
"Cheetah/Tests/Template.py::Preprocessors::test_basicUsage1",
"Cheetah/Tests/Template.py::Preprocessors::test_complexUsage",
"Cheetah/Tests/Template.py::Preprocessors::test_i18n",
"Cheetah/Tests/Template.py::Preprocessors::test_normalizePreprocessorArgVariants",
"Cheetah/Tests/Template.py::TryExceptImportTest::test_FailCase",
"Cheetah/Tests/Template.py::ClassMethodSupport::test_BasicDecorator",
"Cheetah/Tests/Template.py::StaticMethodSupport::test_BasicDecorator",
"Cheetah/Tests/Template.py::SubclassSearchListTest::runTest"
]
| []
| MIT License | 1,109 | [
"Cheetah/Compiler.py"
]
| [
"Cheetah/Compiler.py"
]
|
|
rm-hull__luma.core-62 | 1bcb8932cb9776a5bc72cc0be307e20c624eedbb | 2017-03-22 00:07:01 | 3a09a197b7167dacad43f644c55d027036459d53 | thijstriemstra: work in progress..
thijstriemstra: I'm gonna add a few more tests but it's basically ready for review @rm-hull | diff --git a/.travis.yml b/.travis.yml
index 34d7fbe..b53e76e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -16,8 +16,6 @@ matrix:
env: TOXENV=py37
- python: 2.7
env: TOXENV=qa
- allow_failures:
- - python: 3.7-dev
addons:
apt:
diff --git a/doc/api-documentation.rst b/doc/api-documentation.rst
index a5807de..01fbfbc 100644
--- a/doc/api-documentation.rst
+++ b/doc/api-documentation.rst
@@ -86,6 +86,13 @@ API Documentation
:undoc-members:
:show-inheritance:
+:mod:`luma.core.util`
+""""""""""""""""""""""""
+.. automodule:: luma.core.util
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
:mod:`luma.core.virtual`
""""""""""""""""""""""""
.. automodule:: luma.core.virtual
diff --git a/luma/core/device.py b/luma/core/device.py
index 112752c..ef3aad2 100644
--- a/luma/core/device.py
+++ b/luma/core/device.py
@@ -23,7 +23,7 @@ class device(mixin.capabilities):
self._const = const or luma.core.const.common
self._serial_interface = serial_interface or i2c()
- def shutdown_hook():
+ def shutdown_hook(): # pragma: no cover
try:
self.cleanup()
except:
diff --git a/luma/core/lib.py b/luma/core/lib.py
index 921b074..ad79da7 100644
--- a/luma/core/lib.py
+++ b/luma/core/lib.py
@@ -8,7 +8,7 @@ import luma.core.error
__all__ = ["rpi_gpio", "spidev"]
-def __spidev__(self):
+def __spidev__(self): # pragma: no cover
# spidev cant compile on macOS, so use a similar technique to
# initialize (mainly so the tests run unhindered)
import spidev
@@ -19,7 +19,7 @@ def __rpi_gpio__(self):
# RPi.GPIO _really_ doesn't like being run on anything other than
# a Raspberry Pi... this is imported here so we can swap out the
# implementation for a mock
- try:
+ try: # pragma: no cover
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
return GPIO
diff --git a/luma/core/serial.py b/luma/core/serial.py
index dbfbb65..c5e631c 100644
--- a/luma/core/serial.py
+++ b/luma/core/serial.py
@@ -225,8 +225,8 @@ class noop(object):
def __getattr__(self, attr):
return self.__noop
- def __setattr__(self, attr, val):
+ def __setattr__(self, attr, val): # pragma: no cover
pass
- def __noop(*args, **kwargs):
+ def __noop(self, *args, **kwargs):
pass
diff --git a/luma/core/sprite_system.py b/luma/core/sprite_system.py
index 4b57fd9..a0c83cf 100755
--- a/luma/core/sprite_system.py
+++ b/luma/core/sprite_system.py
@@ -220,6 +220,8 @@ class framerate_regulator(object):
:returns: the effective frame rate
:rtype: float
"""
+ if self.start_time is None:
+ self.start_time = 0
elapsed = monotonic() - self.start_time
return self.called / elapsed
diff --git a/luma/core/threadpool.py b/luma/core/threadpool.py
index 331a235..285c01f 100644
--- a/luma/core/threadpool.py
+++ b/luma/core/threadpool.py
@@ -21,10 +21,7 @@ class worker(Thread):
def run(self):
while True:
func, args, kargs = self.tasks.get()
- try:
- func(*args, **kargs)
- except Exception as e:
- print(e)
+ func(*args, **kargs)
self.tasks.task_done()
diff --git a/luma/core/util.py b/luma/core/util.py
index 2393b0c..229344d 100644
--- a/luma/core/util.py
+++ b/luma/core/util.py
@@ -5,17 +5,23 @@
import os
import sys
import time
+import inspect
import warnings
+import argparse
+import importlib
import ctypes.util
+from collections import OrderedDict
-__all__ = ["deprecation", "monotonic"]
+__all__ = ["deprecation", "monotonic", "get_choices", "get_supported_libraries",
+ "get_interface_types", "get_display_types", "get_transformer_choices",
+ "load_config", "make_serial", "create_device", "create_parser"]
try:
# only available since python 3.3
monotonic = time.monotonic
-except AttributeError:
+except AttributeError: # pragma: no cover
if sys.platform == 'darwin': # OS X, iOS
libc = ctypes.CDLL('/usr/lib/libc.dylib', use_errno=True)
@@ -64,3 +70,196 @@ except AttributeError:
def deprecation(message):
warnings.warn(message, DeprecationWarning, stacklevel=2)
+
+
+def get_choices(module_name):
+ """
+ Retrieve members from ``module_name``'s ``__all__`` list.
+
+ :rtype: list
+ """
+ try:
+ module = importlib.import_module(module_name)
+ if hasattr(module, '__all__'):
+ return module.__all__
+ else:
+ return [name for name, _ in inspect.getmembers(module,
+ inspect.isclass)]
+ except ImportError:
+ return []
+
+
+def get_supported_libraries():
+ """
+ Get list of supported libraries for the parser.
+
+ :rtype: list
+ """
+ return ['oled', 'lcd', 'led_matrix', 'emulator']
+
+
+def get_interface_types():
+ """
+ Get list of available interface types, e.g. ``['spi', 'i2c']``.
+
+ :rtype: list
+ """
+ return get_choices('luma.core.serial')
+
+
+def get_display_types():
+ """
+ Get ordered dict containg available display types from available luma
+ sub-projects.
+
+ :rtype: OrderedDict
+ """
+ display_types = OrderedDict()
+ for namespace in get_supported_libraries():
+ display_types[namespace] = get_choices('luma.{0}.device'.format(
+ namespace))
+
+ return display_types
+
+
+def get_transformer_choices():
+ """
+ :rtype: list
+ """
+ from luma.emulator.render import transformer
+ return [fn for fn in dir(transformer) if fn[0:2] != "__"]
+
+
+def load_config(path):
+ """
+ Load device configuration from file path and return list with parsed lines.
+
+ :param path: Location of configuration file.
+ :type path: str
+ :rtype: list
+ """
+ args = []
+ with open(path, 'r') as fp:
+ for line in fp.readlines():
+ if line.strip() and not line.startswith("#"):
+ args.append(line.replace("\n", ""))
+
+ return args
+
+
+class make_serial(object):
+ """
+ Serial factory.
+ """
+ def __init__(self, opts, gpio=None):
+ self.opts = opts
+ self.gpio = gpio
+
+ def i2c(self):
+ from luma.core.serial import i2c
+ return i2c(port=self.opts.i2c_port, address=self.opts.i2c_address)
+
+ def spi(self):
+ from luma.core.serial import spi
+ return spi(port=self.opts.spi_port,
+ device=self.opts.spi_device,
+ bus_speed_hz=self.opts.spi_bus_speed,
+ gpio_DC=self.opts.gpio_data_command,
+ gpio_RST=self.opts.gpio_reset,
+ gpio=self.gpio)
+
+
+def create_device(args, dtypes=None):
+ """
+ Create and return device.
+ """
+ device = None
+ if dtypes is None:
+ dtypes = get_display_types()
+
+ if args.display in dtypes.get('oled'):
+ import luma.oled.device
+ Device = getattr(luma.oled.device, args.display)
+ Serial = getattr(make_serial(args), args.interface)
+ device = Device(Serial(), **vars(args))
+
+ elif args.display in dtypes.get('lcd'):
+ import luma.lcd.device
+ import luma.lcd.aux
+ Device = getattr(luma.lcd.device, args.display)
+ Serial = getattr(make_serial(args), args.interface)
+ luma.lcd.aux.backlight(gpio_LIGHT=args.gpio_backlight).enable(True)
+ device = Device(Serial(), **vars(args))
+
+ elif args.display in dtypes.get('led_matrix'):
+ import luma.led_matrix.device
+ from luma.core.serial import noop
+ Device = getattr(luma.led_matrix.device, args.display)
+ Serial = make_serial(args, gpio=noop()).spi
+ device = Device(serial_interface=Serial(), **vars(args))
+
+ elif args.display in dtypes.get('emulator'):
+ import luma.emulator.device
+ Device = getattr(luma.emulator.device, args.display)
+ device = Device(**vars(args))
+
+ return device
+
+
+def create_parser(description):
+ """
+ Create and return command-line argument parser.
+ """
+ parser = argparse.ArgumentParser(description=description,
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+ display_types = get_display_types()
+ display_choices = [display for k, v in display_types.items() for display in v]
+ interface_types = get_interface_types()
+ framebuffer_choices = get_choices("luma.core.framebuffer")
+
+ general_group = parser.add_argument_group('General')
+ general_group.add_argument('--config', '-f', type=str, help='Load configuration settings from a file')
+ general_group.add_argument('--display', '-d', type=str, default=display_choices[0], help='Display type, supports real devices or emulators', choices=display_choices)
+ general_group.add_argument('--width', type=int, default=128, help='Width of the device in pixels')
+ general_group.add_argument('--height', type=int, default=64, help='Height of the device in pixels')
+ general_group.add_argument('--rotate', '-r', type=int, default=0, help='Rotation factor', choices=[0, 1, 2, 3])
+ general_group.add_argument('--interface', '-i', type=str, default=interface_types[0], help='Serial interface type', choices=interface_types)
+
+ i2c_group = parser.add_argument_group('I2C')
+ i2c_group.add_argument('--i2c-port', type=int, default=1, help='I2C bus number')
+ i2c_group.add_argument('--i2c-address', type=str, default='0x3C', help='I2C display address')
+
+ spi_group = parser.add_argument_group('SPI')
+ spi_group.add_argument('--spi-port', type=int, default=0, help='SPI port number')
+ spi_group.add_argument('--spi-device', type=int, default=0, help='SPI device')
+ spi_group.add_argument('--spi-bus-speed', type=int, default=8000000, help='SPI max bus speed (Hz)')
+
+ gpio_group = parser.add_argument_group('GPIO')
+ gpio_group.add_argument('--gpio-data-command', type=int, default=24, help='GPIO pin for D/C RESET (SPI devices only)')
+ gpio_group.add_argument('--gpio-reset', type=int, default=25, help='GPIO pin for RESET (SPI devices only)')
+ gpio_group.add_argument('--gpio-backlight', type=int, default=18, help='GPIO pin for backlight (PCD8544 devices only)')
+
+ misc_group = parser.add_argument_group('Misc')
+ misc_group.add_argument('--block-orientation', type=str, default='horizontal', help='Fix 90° phase error (MAX7219 LED matrix only)', choices=['horizontal', 'vertical'])
+ misc_group.add_argument('--mode', type=str, default='RGB', help='Colour mode (SSD1322, SSD1325 and emulator only)', choices=['1', 'RGB', 'RGBA'])
+ misc_group.add_argument('--framebuffer', type=str, default=framebuffer_choices[0], help='Framebuffer implementation (SSD1331, SSD1322, ST7735 displays only)', choices=framebuffer_choices)
+ misc_group.add_argument('--bgr', type=bool, default=False, help='Set to True if LCD pixels laid out in BGR (ST7735 displays only)', choices=[True, False])
+
+ if len(display_types["emulator"]) > 0:
+ transformer_choices = get_transformer_choices()
+
+ emulator_group = parser.add_argument_group('Emulator')
+ emulator_group.add_argument('--transform', type=str, default='scale2x', help='Scaling transform to apply (emulator only)', choices=transformer_choices)
+ emulator_group.add_argument('--scale', type=int, default=2, help='Scaling factor to apply (emulator only)')
+ emulator_group.add_argument('--duration', type=float, default=0.01, help='Animation frame duration (gifanim emulator only)')
+ emulator_group.add_argument('--loop', type=int, default=0, help='Repeat loop, zero=forever (gifanim emulator only)')
+ emulator_group.add_argument('--max-frames', type=int, help='Maximum frames to record (gifanim emulator only)')
+
+ try: # pragma: no cover
+ import argcomplete
+ argcomplete.autocomplete(parser)
+ except ImportError:
+ pass
+
+ return parser
diff --git a/setup.py b/setup.py
index 26fd387..ad3cb16 100644
--- a/setup.py
+++ b/setup.py
@@ -43,7 +43,7 @@ setup(
tests_require=test_deps,
extras_require={
'docs': [
- 'sphinx >= 1.5.3'
+ 'sphinx >= 1.5.1'
],
'test': test_deps,
'qa': [
| add argparser utils from luma.examples
Being able to reuse the demo_opts argparser code from luma.examples would be beneficial for sub-projects. You would be able to use the parser code to create a command-line script that handles all compatible luma libraries.
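A rough usage sketch of what that could look like, assuming the helpers land in `luma.core.util` as in the patch above (the description string is a placeholder):

```python
import sys
from luma.core import util

parser = util.create_parser(description='luma demo')
args = parser.parse_args(sys.argv[1:])
if args.config:
    # options from the --config file are parsed first, so the command line can still override them
    args = parser.parse_args(util.load_config(args.config) + sys.argv[1:])

device = util.create_device(args)
```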
I'm not sure if it belongs in core or a new luma.commandline-something library. Thoughts? | rm-hull/luma.core | diff --git a/tests/helpers.py b/tests/helpers.py
index cf41d46..1fd014d 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -9,9 +9,18 @@ Test helpers.
import os.path
+try:
+ from unittest.mock import patch, call, Mock
+except ImportError:
+ from mock import patch, call, Mock # noqa: F401
-def get_reference_image(fname):
+
+def get_reference_file(fname):
return os.path.abspath(os.path.join(
os.path.dirname(__file__),
'reference',
fname))
+
+
+def get_reference_image(fname):
+ return get_reference_file(os.path.join('images', fname))
diff --git a/tests/reference/config-test.txt b/tests/reference/config-test.txt
new file mode 100644
index 0000000..f55ed7e
--- /dev/null
+++ b/tests/reference/config-test.txt
@@ -0,0 +1,4 @@
+--display=capture
+--width=800
+--height=8600
+--spi-bus-speed=16000000
diff --git a/tests/reference/alt_colors.png b/tests/reference/images/alt_colors.png
similarity index 100%
rename from tests/reference/alt_colors.png
rename to tests/reference/images/alt_colors.png
diff --git a/tests/reference/ansi_colors.png b/tests/reference/images/ansi_colors.png
similarity index 100%
rename from tests/reference/ansi_colors.png
rename to tests/reference/images/ansi_colors.png
diff --git a/tests/reference/control_chars.png b/tests/reference/images/control_chars.png
similarity index 100%
rename from tests/reference/control_chars.png
rename to tests/reference/images/control_chars.png
diff --git a/tests/reference/dither.png b/tests/reference/images/dither.png
similarity index 100%
rename from tests/reference/dither.png
rename to tests/reference/images/dither.png
diff --git a/tests/reference/hotspot.png b/tests/reference/images/hotspot.png
similarity index 100%
rename from tests/reference/hotspot.png
rename to tests/reference/images/hotspot.png
diff --git a/tests/reference/portrait.png b/tests/reference/images/portrait.png
similarity index 100%
rename from tests/reference/portrait.png
rename to tests/reference/images/portrait.png
diff --git a/tests/reference/quick_brown_fox.png b/tests/reference/images/quick_brown_fox.png
similarity index 100%
rename from tests/reference/quick_brown_fox.png
rename to tests/reference/images/quick_brown_fox.png
diff --git a/tests/reference/runner.png b/tests/reference/images/runner.png
similarity index 100%
rename from tests/reference/runner.png
rename to tests/reference/images/runner.png
diff --git a/tests/reference/scroll_text.png b/tests/reference/images/scroll_text.png
similarity index 100%
rename from tests/reference/scroll_text.png
rename to tests/reference/images/scroll_text.png
diff --git a/tests/reference/set_position.png b/tests/reference/images/set_position.png
similarity index 100%
rename from tests/reference/set_position.png
rename to tests/reference/images/set_position.png
diff --git a/tests/reference/tab_align.png b/tests/reference/images/tab_align.png
similarity index 100%
rename from tests/reference/tab_align.png
rename to tests/reference/images/tab_align.png
diff --git a/tests/test_framerate_regulator.py b/tests/test_framerate_regulator.py
index e89ef74..1919869 100644
--- a/tests/test_framerate_regulator.py
+++ b/tests/test_framerate_regulator.py
@@ -7,8 +7,6 @@
Tests for the :py:class:`luma.core.sprite_system.framerate_regulator` class.
"""
-import pytest
-
from luma.core.util import monotonic
from luma.core.sprite_system import framerate_regulator
@@ -66,11 +64,11 @@ def test_sleep():
def test_effective_FPS():
regulator = framerate_regulator(fps=30)
- with pytest.raises(TypeError):
- regulator.effective_FPS()
+ assert regulator.effective_FPS() == 0
def test_average_transit_time():
regulator = framerate_regulator(fps=30)
- with pytest.raises(ZeroDivisionError):
- regulator.average_transit_time()
+ with regulator:
+ pass
+ assert regulator.average_transit_time() > 0
diff --git a/tests/test_legacy.py b/tests/test_legacy.py
index e651dfa..8266148 100644
--- a/tests/test_legacy.py
+++ b/tests/test_legacy.py
@@ -3,16 +3,12 @@
# Copyright (c) 2014-17 Richard Hull and contributors
# See LICENSE.rst for details.
-
-try:
- from unittest.mock import call, Mock
-except ImportError:
- from mock import call, Mock
-
from luma.core.device import dummy
from luma.core.legacy import text, textsize, show_message
from luma.core.legacy.font import proportional, CP437_FONT, LCD_FONT
+from helpers import Mock, call
+
def test_textsize():
"""
diff --git a/tests/test_serial.py b/tests/test_serial.py
index f9e237c..a708510 100644
--- a/tests/test_serial.py
+++ b/tests/test_serial.py
@@ -9,16 +9,14 @@ Tests for the :py:mod:`luma.core.serial` module.
import errno
-try:
- from unittest.mock import patch, call, Mock
-except ImportError:
- from mock import patch, call, Mock
-
import pytest
import smbus2
from luma.core.serial import i2c, spi
import luma.core.error
+from helpers import Mock, patch, call
+
+
smbus = Mock(unsafe=True)
spidev = Mock(unsafe=True)
gpio = Mock(unsafe=True)
diff --git a/tests/test_spritesheet.py b/tests/test_spritesheet.py
index fce267a..c533923 100644
--- a/tests/test_spritesheet.py
+++ b/tests/test_spritesheet.py
@@ -14,7 +14,7 @@ from luma.core.sprite_system import spritesheet
data = {
- 'image': "tests/reference/runner.png",
+ 'image': "tests/reference/images/runner.png",
'frames': {
'width': 64,
'height': 67,
diff --git a/tests/test_util.py b/tests/test_util.py
new file mode 100644
index 0000000..adb27f1
--- /dev/null
+++ b/tests/test_util.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Richard Hull and contributors
+# See LICENSE.rst for details.
+
+"""
+Tests for the :py:mod:`luma.core.util` module.
+"""
+
+import sys
+
+import pytest
+
+from luma.core import util
+from luma.core.serial import __all__ as iface_types
+
+from helpers import get_reference_file, patch, Mock
+
+
+test_config_file = get_reference_file('config-test.txt')
+
+
+def test_deprecation():
+ """
+ ``deprecated`` creates a DeprecationWarning.
+ """
+ class DeprecatedClass(object):
+ def __init__(self):
+ self.msg = 'Deprecated; will be removed in 0.0.1'
+ util.deprecation(self.msg)
+
+ with pytest.deprecated_call() as c:
+ d = DeprecatedClass()
+ assert str(c.list[0].message) == d.msg
+
+
+def test_get_interface_types():
+ """
+ Enumerate interface types.
+ """
+ assert util.get_interface_types() == iface_types
+
+
+def test_get_display_types():
+ """
+ Enumerate display types.
+ """
+ assert list(util.get_display_types().keys()) == util.get_supported_libraries()
+
+
+def test_get_choices_unknown_module():
+ """
+ get_choices returns an empty list when trying to inspect an unknown module.
+ """
+ result = util.get_choices('foo')
+ assert result == []
+
+
+def test_load_config_file_parse():
+ """
+ load_config parses a text file and returns a list of arguments.
+ """
+ result = util.load_config(test_config_file)
+ assert result == [
+ '--display=capture',
+ '--width=800',
+ '--height=8600',
+ '--spi-bus-speed=16000000'
+ ]
+
+
+def test_create_parser():
+ """
+ create_parser returns an argument parser instance.
+ """
+ sys.modules['luma.emulator'] = Mock()
+ sys.modules['luma.emulator.render'] = Mock()
+
+ with patch('luma.core.util.get_display_types') as mocka:
+ mocka.return_value = {
+ 'foo': ['a', 'b'],
+ 'bar': ['c', 'd'],
+ 'emulator': ['e', 'f']
+ }
+ parser = util.create_parser(description='test')
+ args = parser.parse_args(['-f', test_config_file])
+ assert args.config == test_config_file
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 0,
"test_score": 1
},
"num_modified_files": 9
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-warnings"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc i2c-tools"
],
"python": "3.6",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
importlib-metadata==4.8.3
iniconfig==1.1.1
-e git+https://github.com/rm-hull/luma.core.git@1bcb8932cb9776a5bc72cc0be307e20c624eedbb#egg=luma.core
packaging==21.3
Pillow==8.4.0
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-warnings==0.3.1
RPi.GPIO==0.7.1
smbus2==0.5.0
spidev==3.5
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: luma.core
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pillow==8.4.0
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-warnings==0.3.1
- rpi-gpio==0.7.1
- smbus2==0.5.0
- spidev==3.5
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/luma.core
| [
"tests/test_framerate_regulator.py::test_effective_FPS",
"tests/test_util.py::test_get_interface_types",
"tests/test_util.py::test_get_display_types",
"tests/test_util.py::test_get_choices_unknown_module",
"tests/test_util.py::test_load_config_file_parse",
"tests/test_util.py::test_create_parser"
]
| [
"tests/test_serial.py::test_i2c_init_device_permission_error"
]
| [
"tests/test_framerate_regulator.py::test_init_default",
"tests/test_framerate_regulator.py::test_init_unlimited",
"tests/test_framerate_regulator.py::test_init_30fps",
"tests/test_framerate_regulator.py::test_sleep",
"tests/test_framerate_regulator.py::test_average_transit_time",
"tests/test_legacy.py::test_textsize",
"tests/test_legacy.py::test_text_space",
"tests/test_legacy.py::test_text_char",
"tests/test_legacy.py::test_show_message",
"tests/test_serial.py::test_i2c_init_device_not_found",
"tests/test_serial.py::test_i2c_init_device_address_error",
"tests/test_serial.py::test_i2c_init_no_bus",
"tests/test_serial.py::test_i2c_init_bus_provided",
"tests/test_serial.py::test_i2c_command",
"tests/test_serial.py::test_i2c_command_device_not_found_error",
"tests/test_serial.py::test_i2c_data",
"tests/test_serial.py::test_i2c_data_chunked",
"tests/test_serial.py::test_i2c_cleanup",
"tests/test_serial.py::test_spi_init",
"tests/test_serial.py::test_spi_init_params_deprecated",
"tests/test_serial.py::test_spi_init_invalid_bus_speed",
"tests/test_serial.py::test_spi_command",
"tests/test_serial.py::test_spi_data",
"tests/test_serial.py::test_spi_cleanup",
"tests/test_serial.py::test_spi_init_device_not_found",
"tests/test_serial.py::test_spi_unsupported_gpio_platform",
"tests/test_spritesheet.py::test_init",
"tests/test_spritesheet.py::test_len",
"tests/test_spritesheet.py::test_caching",
"tests/test_spritesheet.py::test_get",
"tests/test_spritesheet.py::test_get_string",
"tests/test_spritesheet.py::test_get_outofrange",
"tests/test_spritesheet.py::test_animate_unknown_seq",
"tests/test_spritesheet.py::test_animate_finite_seq",
"tests/test_spritesheet.py::test_animate_slow_seq",
"tests/test_spritesheet.py::test_animate_infinite_seq",
"tests/test_spritesheet.py::test_animate_subroutine",
"tests/test_util.py::test_deprecation"
]
| []
| MIT License | 1,110 | [
"luma/core/serial.py",
"doc/api-documentation.rst",
"setup.py",
"luma/core/threadpool.py",
".travis.yml",
"luma/core/device.py",
"luma/core/lib.py",
"luma/core/util.py",
"luma/core/sprite_system.py"
]
| [
"luma/core/serial.py",
"doc/api-documentation.rst",
"setup.py",
"luma/core/threadpool.py",
".travis.yml",
"luma/core/device.py",
"luma/core/lib.py",
"luma/core/util.py",
"luma/core/sprite_system.py"
]
|
mardiros__aioxmlrpc-10 | 5480f35630d166bfa686e2e02b28c581e16bb723 | 2017-03-23 00:48:03 | 5480f35630d166bfa686e2e02b28c581e16bb723 | diff --git a/aioxmlrpc/client.py b/aioxmlrpc/client.py
index 35bca86..5ed292b 100644
--- a/aioxmlrpc/client.py
+++ b/aioxmlrpc/client.py
@@ -79,8 +79,14 @@ class AioTransport(xmlrpc.Transport):
raise
except Exception as exc:
log.error('Unexpected error', exc_info=True)
- raise ProtocolError(url, response.status,
- str(exc), response.headers)
+ if response is not None:
+ errcode = response.status
+ headers = response.headers
+ else:
+ errcode = 0
+ headers = {}
+
+ raise ProtocolError(url, errcode, str(exc), headers)
return self.parse_response(body)
def parse_response(self, body):
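The guard above, restated as a compact sketch (the helper name is made up and is not part of aioxmlrpc's API): when `aiohttp.request` itself raises, for example because the server closed the socket, no `response` object exists, so the `ProtocolError` details have to be synthesized.

```python
def protocol_error_args(url, exc, response=None):
    """Return (url, errcode, errmsg, headers) even when no HTTP response was received."""
    if response is not None:
        return url, response.status, str(exc), response.headers
    return url, 0, str(exc), {}
```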
| AttributeError when server closes socket
If the XMLRPC server closes the socket, which is rather common if it's Apache, I get this error:
```
Traceback (most recent call last):
File "/usr/share/routest/env/lib/python3.5/site-packages/aioxmlrpc/client.py", line 71, in request
connector=self._connector, loop=self._loop)
File "/usr/share/routest/env/lib/python3.5/site-packages/aiohttp/client.py", line 605, in __iter__
return (yield from self._coro)
File "/usr/share/routest/env/lib/python3.5/site-packages/aiohttp/client.py", line 161, in _request
raise RuntimeError('Session is closed')
RuntimeError: Session is closed
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/share/routest/env/lib/python3.5/site-packages/routest/resource/space/serializers.py", line 159, in check_job_status
self.job_status = await self.lava.scheduler.job_status(self.job_id)
File "/usr/share/routest/env/lib/python3.5/site-packages/aioxmlrpc/client.py", line 37, in __call__
ret = yield from self.__send(self.__name, args)
File "/usr/share/routest/env/lib/python3.5/site-packages/aioxmlrpc/client.py", line 130, in __request
verbose=self.__verbose
File "/usr/share/routest/env/lib/python3.5/site-packages/aioxmlrpc/client.py", line 80, in request
raise ProtocolError(url, response.status,
AttributeError: 'NoneType' object has no attribute 'status'
``` | mardiros/aioxmlrpc | diff --git a/aioxmlrpc/tests/test_client.py b/aioxmlrpc/tests/test_client.py
index 304045d..98fef6b 100644
--- a/aioxmlrpc/tests/test_client.py
+++ b/aioxmlrpc/tests/test_client.py
@@ -128,3 +128,28 @@ class ServerProxyTestCase(TestCase):
self.assertEqual(response, 1)
self.assertIs(self.loop, client._loop)
self.assertTrue(transp._connector.close.called)
+
+
[email protected]
+def failing_request(*args, **kwargs):
+ raise OSError
+
+
+class HTTPErrorTestCase(TestCase):
+
+ def setUp(self):
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(None)
+ self.aiohttp_request = mock.patch('aiohttp.request', new=failing_request)
+ self.aiohttp_request.start()
+
+ def tearDown(self):
+ self.aiohttp_request.stop()
+
+ def test_http_error(self):
+ from aioxmlrpc.client import ServerProxy, ProtocolError
+ client = ServerProxy('http://nonexistent/nonexistent', loop=self.loop)
+ self.assertRaises(ProtocolError,
+ self.loop.run_until_complete,
+ client.name.space.proxfyiedcall()
+ )
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohttp==3.8.6
aiosignal==1.2.0
-e git+https://github.com/mardiros/aioxmlrpc.git@5480f35630d166bfa686e2e02b28c581e16bb723#egg=aioxmlrpc
async-timeout==4.0.2
asynctest==0.13.0
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
charset-normalizer==3.0.1
frozenlist==1.2.0
idna==3.10
idna-ssl==1.1.0
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
multidict==5.2.0
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
yarl==1.7.2
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: aioxmlrpc
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aiohttp==3.8.6
- aiosignal==1.2.0
- async-timeout==4.0.2
- asynctest==0.13.0
- charset-normalizer==3.0.1
- frozenlist==1.2.0
- idna==3.10
- idna-ssl==1.1.0
- multidict==5.2.0
- yarl==1.7.2
prefix: /opt/conda/envs/aioxmlrpc
| [
"aioxmlrpc/tests/test_client.py::HTTPErrorTestCase::test_http_error"
]
| []
| [
"aioxmlrpc/tests/test_client.py::ServerProxyTestCase::test_close_transport",
"aioxmlrpc/tests/test_client.py::ServerProxyTestCase::test_http_500",
"aioxmlrpc/tests/test_client.py::ServerProxyTestCase::test_xmlrpc_fault",
"aioxmlrpc/tests/test_client.py::ServerProxyTestCase::test_xmlrpc_ok",
"aioxmlrpc/tests/test_client.py::ServerProxyTestCase::test_xmlrpc_ok_global_loop"
]
| []
| MIT License | 1,111 | [
"aioxmlrpc/client.py"
]
| [
"aioxmlrpc/client.py"
]
|
|
google__mobly-164 | 464a16aee403100025085846f0b6a5e29620e5b6 | 2017-03-23 02:48:42 | 777e2f766959889d12108024be0070fd0939dd6a | xpconanfan:
(on tests/mobly/utils_test.py, line 33 at r1)

> ```Python
> "Process .* has terminated"):
> utils.start_standing_subprocess(
> ['sleep','0'], check_health_delay=0.5)
> ```

Why the change from 0.1 to 0.5 and str to list? This does not seem to be related to `exe_cmd`?

adorokhine: Done.

dthkao: :lgtm:

xpconanfan: (same line) I still see this being changed to a list. Is this a leak from the start standing subprocess change?

adorokhine: Thanks, good catch. Done.

xpconanfan: :lgtm:
| diff --git a/mobly/controllers/sniffer_lib/local/local_base.py b/mobly/controllers/sniffer_lib/local/local_base.py
index 859c242..781e4b6 100644
--- a/mobly/controllers/sniffer_lib/local/local_base.py
+++ b/mobly/controllers/sniffer_lib/local/local_base.py
@@ -54,9 +54,10 @@ class SnifferLocalBase(sniffer.Sniffer):
self._base_configs = base_configs
try:
- utils.exe_cmd("ifconfig", self._interface, "down")
- utils.exe_cmd("iwconfig", self._interface, "mode", "monitor")
- utils.exe_cmd("ifconfig", self._interface, "up")
+ subprocess.check_call(['ifconfig', self._interface, 'down'])
+ subprocess.check_call(
+ ['iwconfig', self._interface, 'mode', 'monitor'])
+ subprocess.check_call(['ifconfig', self._interface, 'up'])
except Exception as err:
raise sniffer.ExecutionError(err)
@@ -87,8 +88,11 @@ class SnifferLocalBase(sniffer.Sniffer):
if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
try:
- utils.exe_cmd("iwconfig", self._interface, "channel",
- str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]))
+ subprocess.check_call([
+ 'iwconfig',
+ self._interface,
+ 'channel',
+ str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL])])
except Exception as err:
raise sniffer.ExecutionError(err)
diff --git a/mobly/utils.py b/mobly/utils.py
index ba47c60..c9be01e 100644
--- a/mobly/utils.py
+++ b/mobly/utils.py
@@ -280,27 +280,6 @@ def concurrent_exec(func, param_list):
return return_vals
-def exe_cmd(*cmds):
- """Executes commands in a new shell.
-
- Args:
- cmds: A sequence of commands and arguments.
-
- Returns:
- The output of the command run.
-
- Raises:
- OSError is raised if an error occurred during the command execution.
- """
- cmd = ' '.join(cmds)
- proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
- (out, err) = proc.communicate()
- if not err:
- return out
- raise OSError(err)
-
-
def _assert_subprocess_running(proc):
"""Checks if a subprocess has terminated on its own.
@@ -368,14 +347,21 @@ def stop_standing_subprocess(proc, kill_signal=signal.SIGTERM):
_assert_subprocess_running(proc)
process = psutil.Process(pid)
success = True
- for child in process.children(recursive=True):
+ try:
+ children = process.children(recursive=True)
+ except AttributeError:
+ # Handle versions <3.0.0 of psutil.
+ children = process.get_children(recursive=True)
+ for child in children:
try:
child.kill()
+ child.wait(timeout=10)
except:
success = False
logging.exception('Failed to kill standing subprocess %d', child.pid)
try:
process.kill()
+ process.wait(timeout=10)
except:
success = False
logging.exception('Failed to kill standing subprocess %d', pid)
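The essential change above, restated as a generic pattern (simplified: the fallback for psutil < 3.0.0 and the per-child error logging are omitted): after killing each process, wait on it, so callers that poll the parent immediately afterwards do not race the OS reaping the process.

```python
import psutil

def kill_process_tree(pid, timeout=10):
    """Kill a process and all of its children, waiting for each to actually exit."""
    parent = psutil.Process(pid)
    for child in parent.children(recursive=True):  # psutil >= 3.0 API
        child.kill()
        child.wait(timeout=timeout)
    parent.kill()
    parent.wait(timeout=timeout)
```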
| Flake in unit tests
Reported in #87
https://travis-ci.org/google/mobly/jobs/197104959
=================================== FAILURES ===================================
____________________ UtilsTest.test_start_standing_subproc _____________________
self = <tests.mobly.utils_test.UtilsTest testMethod=test_start_standing_subproc>
def test_start_standing_subproc(self):
with self.assertRaisesRegexp(utils.Error,
"Process .* has terminated"):
> utils.start_standing_subprocess("sleep 0", check_health_delay=0.1)
E AssertionError: Error not raised
tests/mobly/utils_test.py:32: AssertionError | google/mobly | diff --git a/tests/mobly/controllers/android_device_test.py b/tests/mobly/controllers/android_device_test.py
index c5b3733..3a0940e 100755
--- a/tests/mobly/controllers/android_device_test.py
+++ b/tests/mobly/controllers/android_device_test.py
@@ -209,8 +209,7 @@ class AndroidDeviceTest(unittest.TestCase):
@mock.patch('mobly.controllers.android_device_lib.fastboot.FastbootProxy',
return_value=mock_android_device.MockFastbootProxy(1))
@mock.patch('mobly.utils.create_dir')
- @mock.patch('mobly.utils.exe_cmd')
- def test_AndroidDevice_take_bug_report(self, exe_mock, create_dir_mock,
+ def test_AndroidDevice_take_bug_report(self, create_dir_mock,
FastbootProxy, MockAdbProxy):
"""Verifies AndroidDevice.take_bug_report calls the correct adb command
and writes the bugreport file to the correct path.
@@ -227,8 +226,7 @@ class AndroidDeviceTest(unittest.TestCase):
@mock.patch('mobly.controllers.android_device_lib.fastboot.FastbootProxy',
return_value=mock_android_device.MockFastbootProxy(1))
@mock.patch('mobly.utils.create_dir')
- @mock.patch('mobly.utils.exe_cmd')
- def test_AndroidDevice_take_bug_report_fail(self, exe_mock, create_dir_mock,
+ def test_AndroidDevice_take_bug_report_fail(self, create_dir_mock,
FastbootProxy, MockAdbProxy):
"""Verifies AndroidDevice.take_bug_report writes out the correct message
when taking bugreport fails.
@@ -245,8 +243,7 @@ class AndroidDeviceTest(unittest.TestCase):
@mock.patch('mobly.controllers.android_device_lib.fastboot.FastbootProxy',
return_value=mock_android_device.MockFastbootProxy(1))
@mock.patch('mobly.utils.create_dir')
- @mock.patch('mobly.utils.exe_cmd')
- def test_AndroidDevice_take_bug_report_fallback(self, exe_mock,
+ def test_AndroidDevice_take_bug_report_fallback(self,
create_dir_mock, FastbootProxy, MockAdbProxy):
"""Verifies AndroidDevice.take_bug_report falls back to traditional
bugreport on builds that do not have bugreportz.
diff --git a/tests/mobly/utils_test.py b/tests/mobly/utils_test.py
index fb2f519..de618a5 100755
--- a/tests/mobly/utils_test.py
+++ b/tests/mobly/utils_test.py
@@ -32,6 +32,11 @@ class UtilsTest(unittest.TestCase):
utils.start_standing_subprocess("sleep 0", check_health_delay=0.1)
def test_stop_standing_subproc(self):
+ p = utils.start_standing_subprocess('sleep 5')
+ utils.stop_standing_subprocess(p)
+ self.assertIsNotNone(p.poll())
+
+ def test_stop_standing_subproc_already_dead(self):
p = utils.start_standing_subprocess("sleep 0")
time.sleep(0.1)
with self.assertRaisesRegexp(utils.Error,
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup==1.2.2
future==1.0.0
iniconfig==2.1.0
-e git+https://github.com/google/mobly.git@464a16aee403100025085846f0b6a5e29620e5b6#egg=mobly
mock==1.0.1
packaging==24.2
pluggy==1.5.0
psutil==7.0.0
pytest==8.3.5
pytz==2025.2
PyYAML==6.0.2
tomli==2.2.1
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- future==1.0.0
- iniconfig==2.1.0
- mock==1.0.1
- packaging==24.2
- pluggy==1.5.0
- psutil==7.0.0
- pytest==8.3.5
- pytz==2025.2
- pyyaml==6.0.2
- tomli==2.2.1
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/utils_test.py::UtilsTest::test_stop_standing_subproc"
]
| []
| [
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_build_info",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_cat_adb_log",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_debug_tag",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_instantiation",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_attribute_name",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_package",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_snippet_name",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_snippet_cleanup",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report_fail",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report_fallback",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_logcat",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_logcat_with_user_param",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_dict_list",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_empty_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_no_valid_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_not_list_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_pickup_all",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_string_list",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_no_match",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_success_with_serial",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_success_with_serial_and_extra_field",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_too_many_matches",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_start_services_on_ads",
"tests/mobly/utils_test.py::UtilsTest::test_is_port_available_negative",
"tests/mobly/utils_test.py::UtilsTest::test_is_port_available_positive",
"tests/mobly/utils_test.py::UtilsTest::test_start_standing_subproc",
"tests/mobly/utils_test.py::UtilsTest::test_stop_standing_subproc_already_dead"
]
| []
| Apache License 2.0 | 1,112 | [
"mobly/controllers/sniffer_lib/local/local_base.py",
"mobly/utils.py"
]
| [
"mobly/controllers/sniffer_lib/local/local_base.py",
"mobly/utils.py"
]
|
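To make the flake described in the google/mobly problem statement above concrete: `test_start_standing_subproc` expects a `utils.Error` because the `sleep 0` child should already have terminated by the time the 0.1 s health check runs, but on a slow CI worker the child can still be alive at that point, so no error is raised. A minimal standalone sketch of that race (not mobly code; assumes a Unix-like system with `sleep` available):

```python
import subprocess
import time

# Start a child that should exit almost immediately, then check on it after the
# same 0.1 s delay the flaky test used.
proc = subprocess.Popen('sleep 0', shell=True,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
time.sleep(0.1)

if proc.poll() is None:
    # Still running: the health check would consider the process healthy,
    # utils.Error would not be raised, and the test flakes.
    print('child still alive after 0.1 s -> flake')
else:
    # Already exited: the health check raises, which is what the test expects.
    print('child already terminated -> test passes')

proc.wait()
```

The new `test_stop_standing_subproc` in the accompanying test patch sidesteps this timing sensitivity by exercising `stop_standing_subprocess` against a longer-lived `sleep 5` child.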
google__mobly-165 | 464a16aee403100025085846f0b6a5e29620e5b6 | 2017-03-23 03:39:23 | 777e2f766959889d12108024be0070fd0939dd6a | dthkao: :lgtm:
dthkao: THANKS!
| diff --git a/mobly/controllers/sniffer_lib/local/local_base.py b/mobly/controllers/sniffer_lib/local/local_base.py
index 859c242..781e4b6 100644
--- a/mobly/controllers/sniffer_lib/local/local_base.py
+++ b/mobly/controllers/sniffer_lib/local/local_base.py
@@ -54,9 +54,10 @@ class SnifferLocalBase(sniffer.Sniffer):
self._base_configs = base_configs
try:
- utils.exe_cmd("ifconfig", self._interface, "down")
- utils.exe_cmd("iwconfig", self._interface, "mode", "monitor")
- utils.exe_cmd("ifconfig", self._interface, "up")
+ subprocess.check_call(['ifconfig', self._interface, 'down'])
+ subprocess.check_call(
+ ['iwconfig', self._interface, 'mode', 'monitor'])
+ subprocess.check_call(['ifconfig', self._interface, 'up'])
except Exception as err:
raise sniffer.ExecutionError(err)
@@ -87,8 +88,11 @@ class SnifferLocalBase(sniffer.Sniffer):
if sniffer.Sniffer.CONFIG_KEY_CHANNEL in final_configs:
try:
- utils.exe_cmd("iwconfig", self._interface, "channel",
- str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL]))
+ subprocess.check_call([
+ 'iwconfig',
+ self._interface,
+ 'channel',
+ str(final_configs[sniffer.Sniffer.CONFIG_KEY_CHANNEL])])
except Exception as err:
raise sniffer.ExecutionError(err)
diff --git a/mobly/utils.py b/mobly/utils.py
index ba47c60..338c4e8 100644
--- a/mobly/utils.py
+++ b/mobly/utils.py
@@ -280,27 +280,6 @@ def concurrent_exec(func, param_list):
return return_vals
-def exe_cmd(*cmds):
- """Executes commands in a new shell.
-
- Args:
- cmds: A sequence of commands and arguments.
-
- Returns:
- The output of the command run.
-
- Raises:
- OSError is raised if an error occurred during the command execution.
- """
- cmd = ' '.join(cmds)
- proc = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
- (out, err) = proc.communicate()
- if not err:
- return out
- raise OSError(err)
-
-
def _assert_subprocess_running(proc):
"""Checks if a subprocess has terminated on its own.
@@ -339,10 +318,16 @@ def start_standing_subprocess(cmd, check_health_delay=0):
"""
proc = subprocess.Popen(
cmd,
+ stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
logging.debug('Start standing subprocess with cmd: %s', cmd)
+ # Leaving stdin open causes problems for input, e.g. breaking the
+ # code.inspect() shell (http://stackoverflow.com/a/25512460/1612937), so
+ # explicitly close it assuming it is not needed for standing subprocesses.
+ proc.stdin.close()
+ proc.stdin = None
if check_health_delay > 0:
time.sleep(check_health_delay)
_assert_subprocess_running(proc)
@@ -368,14 +353,21 @@ def stop_standing_subprocess(proc, kill_signal=signal.SIGTERM):
_assert_subprocess_running(proc)
process = psutil.Process(pid)
success = True
- for child in process.children(recursive=True):
+ try:
+ children = process.children(recursive=True)
+ except AttributeError:
+ # Handle versions <3.0.0 of psutil.
+ children = process.get_children(recursive=True)
+ for child in children:
try:
child.kill()
+ child.wait(timeout=10)
except:
success = False
logging.exception('Failed to kill standing subprocess %d', child.pid)
try:
process.kill()
+ process.wait(timeout=10)
except:
success = False
logging.exception('Failed to kill standing subprocess %d', pid)
| Flake in unit tests
Reported in #87
https://travis-ci.org/google/mobly/jobs/197104959
=================================== FAILURES ===================================
____________________ UtilsTest.test_start_standing_subproc _____________________
self = <tests.mobly.utils_test.UtilsTest testMethod=test_start_standing_subproc>
def test_start_standing_subproc(self):
with self.assertRaisesRegexp(utils.Error,
"Process .* has terminated"):
>           utils.start_standing_subprocess("sleep 0", check_health_delay=0.1)
E AssertionError: Error not raised
tests/mobly/utils_test.py:32: AssertionError | google/mobly | diff --git a/tests/mobly/controllers/android_device_test.py b/tests/mobly/controllers/android_device_test.py
index c5b3733..3a0940e 100755
--- a/tests/mobly/controllers/android_device_test.py
+++ b/tests/mobly/controllers/android_device_test.py
@@ -209,8 +209,7 @@ class AndroidDeviceTest(unittest.TestCase):
@mock.patch('mobly.controllers.android_device_lib.fastboot.FastbootProxy',
return_value=mock_android_device.MockFastbootProxy(1))
@mock.patch('mobly.utils.create_dir')
- @mock.patch('mobly.utils.exe_cmd')
- def test_AndroidDevice_take_bug_report(self, exe_mock, create_dir_mock,
+ def test_AndroidDevice_take_bug_report(self, create_dir_mock,
FastbootProxy, MockAdbProxy):
"""Verifies AndroidDevice.take_bug_report calls the correct adb command
and writes the bugreport file to the correct path.
@@ -227,8 +226,7 @@ class AndroidDeviceTest(unittest.TestCase):
@mock.patch('mobly.controllers.android_device_lib.fastboot.FastbootProxy',
return_value=mock_android_device.MockFastbootProxy(1))
@mock.patch('mobly.utils.create_dir')
- @mock.patch('mobly.utils.exe_cmd')
- def test_AndroidDevice_take_bug_report_fail(self, exe_mock, create_dir_mock,
+ def test_AndroidDevice_take_bug_report_fail(self, create_dir_mock,
FastbootProxy, MockAdbProxy):
"""Verifies AndroidDevice.take_bug_report writes out the correct message
when taking bugreport fails.
@@ -245,8 +243,7 @@ class AndroidDeviceTest(unittest.TestCase):
@mock.patch('mobly.controllers.android_device_lib.fastboot.FastbootProxy',
return_value=mock_android_device.MockFastbootProxy(1))
@mock.patch('mobly.utils.create_dir')
- @mock.patch('mobly.utils.exe_cmd')
- def test_AndroidDevice_take_bug_report_fallback(self, exe_mock,
+ def test_AndroidDevice_take_bug_report_fallback(self,
create_dir_mock, FastbootProxy, MockAdbProxy):
"""Verifies AndroidDevice.take_bug_report falls back to traditional
bugreport on builds that do not have bugreportz.
diff --git a/tests/mobly/utils_test.py b/tests/mobly/utils_test.py
index fb2f519..73ea149 100755
--- a/tests/mobly/utils_test.py
+++ b/tests/mobly/utils_test.py
@@ -29,11 +29,16 @@ class UtilsTest(unittest.TestCase):
def test_start_standing_subproc(self):
with self.assertRaisesRegexp(utils.Error,
"Process .* has terminated"):
- utils.start_standing_subprocess("sleep 0", check_health_delay=0.1)
+ utils.start_standing_subprocess("sleep 0", check_health_delay=0.5)
def test_stop_standing_subproc(self):
+ p = utils.start_standing_subprocess('sleep 5')
+ utils.stop_standing_subprocess(p)
+ self.assertIsNotNone(p.poll())
+
+ def test_stop_standing_subproc_already_dead(self):
p = utils.start_standing_subprocess("sleep 0")
- time.sleep(0.1)
+ time.sleep(0.5)
with self.assertRaisesRegexp(utils.Error,
"Process .* has terminated"):
utils.stop_standing_subprocess(p)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 2
} | 1.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
future==1.0.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/google/mobly.git@464a16aee403100025085846f0b6a5e29620e5b6#egg=mobly
mock==1.0.1
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
psutil==7.0.0
pytest @ file:///croot/pytest_1738938843180/work
pytz==2025.2
PyYAML==6.0.2
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
| name: mobly
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- future==1.0.0
- mock==1.0.1
- psutil==7.0.0
- pytz==2025.2
- pyyaml==6.0.2
prefix: /opt/conda/envs/mobly
| [
"tests/mobly/utils_test.py::UtilsTest::test_stop_standing_subproc"
]
| []
| [
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_build_info",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_cat_adb_log",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_debug_tag",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_instantiation",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_attribute_name",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_package",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_load_snippet_dup_snippet_name",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_snippet_cleanup",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report_fail",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_bug_report_fallback",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_logcat",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_AndroidDevice_take_logcat_with_user_param",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_dict_list",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_empty_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_no_valid_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_not_list_config",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_pickup_all",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_create_with_string_list",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_no_match",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_success_with_serial",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_success_with_serial_and_extra_field",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_get_device_too_many_matches",
"tests/mobly/controllers/android_device_test.py::AndroidDeviceTest::test_start_services_on_ads",
"tests/mobly/utils_test.py::UtilsTest::test_is_port_available_negative",
"tests/mobly/utils_test.py::UtilsTest::test_is_port_available_positive",
"tests/mobly/utils_test.py::UtilsTest::test_start_standing_subproc",
"tests/mobly/utils_test.py::UtilsTest::test_stop_standing_subproc_already_dead"
]
| []
| Apache License 2.0 | 1,113 | [
"mobly/controllers/sniffer_lib/local/local_base.py",
"mobly/utils.py"
]
| [
"mobly/controllers/sniffer_lib/local/local_base.py",
"mobly/utils.py"
]
|
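Beyond the psutil compatibility shim, the subtle part of the mobly-165 patch above is the stdin handling in `start_standing_subprocess`: the pipe is opened and then closed immediately, per the in-patch comment that leaving stdin open breaks interactive input such as the `code` module's shell. A standalone sketch of just that trick (not the mobly helper itself; Unix-only because it shells out to `sleep`):

```python
import subprocess

# Launch a long-lived child with stdin as a pipe, then close the pipe right away
# so the child neither shares nor blocks on the parent's stdin.
proc = subprocess.Popen('sleep 5', shell=True,
                        stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)
proc.stdin.close()
proc.stdin = None  # mirror the patch: drop the reference so nothing writes to it later

print('standing subprocess pid:', proc.pid)

# Tidy up for the sketch; mobly's stop_standing_subprocess additionally kills any
# child processes and waits on each with a timeout.
proc.kill()
proc.wait(timeout=10)
```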
colour-science__colour-314 | c8d6d39746fc04f65306acfabe636d81229965cd | 2017-03-23 08:27:50 | 3cd6ab8d4c3483bcdeb2d7ef33967160808c0bb2 | diff --git a/colour/models/rgb/transfer_functions/bt_1886.py b/colour/models/rgb/transfer_functions/bt_1886.py
index 73fb873e9..a366685f9 100644
--- a/colour/models/rgb/transfer_functions/bt_1886.py
+++ b/colour/models/rgb/transfer_functions/bt_1886.py
@@ -41,7 +41,7 @@ __all__ = ['oetf_BT1886',
'eotf_BT1886']
-def oetf_BT1886(L, L_B=64, L_W=940):
+def oetf_BT1886(L, L_B=0, L_W=1):
"""
Defines *Recommendation ITU-R BT.1886* opto-electrical transfer function
(OETF / OECF).
@@ -69,7 +69,7 @@ def oetf_BT1886(L, L_B=64, L_W=940):
Examples
--------
- >>> oetf_BT1886(277.98159179331145) # doctest: +ELLIPSIS
+ >>> oetf_BT1886(0.11699185725296059) # doctest: +ELLIPSIS
0.4090077...
"""
@@ -92,7 +92,7 @@ def oetf_BT1886(L, L_B=64, L_W=940):
return V
-def eotf_BT1886(V, L_B=64, L_W=940):
+def eotf_BT1886(V, L_B=0, L_W=1):
"""
Defines *Recommendation ITU-R BT.1886* electro-optical transfer function
(EOTF / EOCF).
@@ -118,7 +118,7 @@ def eotf_BT1886(V, L_B=64, L_W=940):
Examples
--------
>>> eotf_BT1886(0.409007728864150) # doctest: +ELLIPSIS
- 277.9815917...
+ 0.1169918...
"""
V = np.asarray(V)
| Change "L_B" and "L_W" default argument values for "BT.1886" eotf.
Current default values are 64 and 940, i.e. 10-bit black and white code values. L_B and L_W should be screen luminance in cd/m^2 if the function is to return absolute luminance, or e.g. 0 and 1 to return normalised values. | colour-science/colour | diff --git a/colour/models/rgb/transfer_functions/tests/tests_bt_1886.py b/colour/models/rgb/transfer_functions/tests/tests_bt_1886.py
index b9254dd3a..d259abb97 100644
--- a/colour/models/rgb/transfer_functions/tests/tests_bt_1886.py
+++ b/colour/models/rgb/transfer_functions/tests/tests_bt_1886.py
@@ -38,18 +38,18 @@ oetf_BT1886` definition.
"""
self.assertAlmostEqual(
- oetf_BT1886(64.0),
+ oetf_BT1886(0.0),
0.0,
places=7)
self.assertAlmostEqual(
- oetf_BT1886(184.32),
- 0.268401363726554,
+ oetf_BT1886(0.016317514686316),
+ 0.18,
places=7)
self.assertAlmostEqual(
- oetf_BT1886(940),
- 1.000000000000000,
+ oetf_BT1886(1.0),
+ 1.0,
places=7)
def test_n_dimensional_oetf_BT1886(self):
@@ -58,8 +58,8 @@ oetf_BT1886` definition.
oetf_BT1886` definition n-dimensional arrays support.
"""
- L = 184.32
- V = 0.268401363726554
+ L = 0.016317514686316
+ V = 0.18
np.testing.assert_almost_equal(
oetf_BT1886(L),
V,
@@ -111,17 +111,17 @@ eotf_BT1886` definition.
self.assertAlmostEqual(
eotf_BT1886(0.0),
- 64.0,
+ 0.0,
places=7)
self.assertAlmostEqual(
eotf_BT1886(0.18),
- 136.58617957,
+ 0.016317514686316,
places=7)
self.assertAlmostEqual(
eotf_BT1886(1.0),
- 940.00000000,
+ 1.0,
places=7)
def test_n_dimensional_eotf_BT1886(self):
@@ -131,7 +131,7 @@ eotf_BT1886` definition n-dimensional arrays support.
"""
V = 0.18
- L = 136.58617957
+ L = 0.016317514686316
np.testing.assert_almost_equal(
eotf_BT1886(V),
L,
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
-e git+https://github.com/colour-science/colour.git@c8d6d39746fc04f65306acfabe636d81229965cd#egg=colour_science
coverage==6.2
execnet==1.9.0
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
nose==1.3.7
numpy==1.19.5
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
scipy==1.5.4
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: colour
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- execnet==1.9.0
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- nose==1.3.7
- numpy==1.19.5
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- scipy==1.5.4
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/colour
| [
"colour/models/rgb/transfer_functions/tests/tests_bt_1886.py::TestOetf_BT1886::test_n_dimensional_oetf_BT1886",
"colour/models/rgb/transfer_functions/tests/tests_bt_1886.py::TestOetf_BT1886::test_oetf_BT1886",
"colour/models/rgb/transfer_functions/tests/tests_bt_1886.py::TestEotf_BT1886::test_eotf_BT1886",
"colour/models/rgb/transfer_functions/tests/tests_bt_1886.py::TestEotf_BT1886::test_n_dimensional_eotf_BT1886"
]
| []
| [
"colour/models/rgb/transfer_functions/tests/tests_bt_1886.py::TestOetf_BT1886::test_nan_oetf_BT1886",
"colour/models/rgb/transfer_functions/tests/tests_bt_1886.py::TestEotf_BT1886::test_nan_eotf_BT1886"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,114 | [
"colour/models/rgb/transfer_functions/bt_1886.py"
]
| [
"colour/models/rgb/transfer_functions/bt_1886.py"
]
|
|
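A quick way to sanity-check the new expectations in the colour-science test patch above: with the proposed normalised defaults `L_B=0` and `L_W=1`, the BT.1886 EOTF's gain term becomes 1 and its black-level offset 0, so the transfer function collapses to a plain 2.4 power law. A sketch using only the standard library rather than the colour API, under that assumption:

```python
# With L_B = 0 and L_W = 1 the EOTF reduces to L = V ** 2.4.
V = 0.18
L = V ** 2.4
print(L)               # ~0.016317514686316, the value asserted in the updated tests

# The matching OETF round-trip is the inverse power.
print(L ** (1 / 2.4))  # ~0.18
```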
conjure-up__conjure-up-761 | 9e24a368a5a50bd8493484373829d5b8f3d2fb8e | 2017-03-23 21:59:52 | 33cca823b51a4f745145a7f5ecf0ceb0852f5bcf | diff --git a/README.md b/README.md
index 785176f..df56c32 100644
--- a/README.md
+++ b/README.md
@@ -48,6 +48,10 @@ with all the sensible defaults in place.
$ conjure-up canonical-kubernetes localhost
```
+Note that some spells require sudo for certain steps. When running in headless
+mode, conjure-up should be run with a user with passwordless sudo enabled (or
+sudo should be pre-authorized before invoking conjure-up).
+
## Destroying deployments
```
diff --git a/conjureup/controllers/clouds/tui.py b/conjureup/controllers/clouds/tui.py
index 3b2cd5d..d7c9c9f 100644
--- a/conjureup/controllers/clouds/tui.py
+++ b/conjureup/controllers/clouds/tui.py
@@ -33,7 +33,8 @@ class CloudsController:
"please wait.".format(app.current_model))
juju.add_model(app.current_model,
app.current_controller,
- app.current_cloud)
+ app.current_cloud,
+ allow_exists=True)
return controllers.use('deploy').render()
utils.error("Something happened with the controller or model, "
diff --git a/conjureup/controllers/steps/common.py b/conjureup/controllers/steps/common.py
index d990ade..d082321 100644
--- a/conjureup/controllers/steps/common.py
+++ b/conjureup/controllers/steps/common.py
@@ -1,10 +1,20 @@
import json
import os
+import os.path as path
from glob import glob
+import yaml
+
from conjureup import juju, utils
from conjureup.api.models import model_info
from conjureup.app_config import app
+from conjureup.models.step import StepModel
+
+
+class ValidationError(Exception):
+ def __init__(self, msg, *args, **kwargs):
+ self.msg = msg
+ super().__init__(msg, *args, **kwargs)
def set_env(inputs):
@@ -38,6 +48,22 @@ def get_step_metadata_filenames(steps_dir):
return sorted(glob(os.path.join(steps_dir, 'step-*.yaml')))
+def load_step(step_meta_path):
+ step_ex_path, ext = path.splitext(step_meta_path)
+ short_path = '/'.join(step_ex_path.split('/')[-3:])
+ if not path.isfile(step_ex_path):
+ raise ValidationError(
+ 'Step {} has no implementation'.format(short_path))
+ elif not os.access(step_ex_path, os.X_OK):
+ raise ValidationError(
+ 'Step {} is not executable, make sure it has '
+ 'the executable bit set'.format(short_path))
+ with open(step_meta_path) as fp:
+ step_metadata = yaml.load(fp.read())
+ model = StepModel(step_metadata, step_ex_path)
+ return model
+
+
def do_step(step_model, step_widget, message_cb, gui=False):
""" Processes steps in the background
@@ -50,6 +76,13 @@ def do_step(step_model, step_widget, message_cb, gui=False):
Step title and results message
"""
+ if step_model.needs_sudo:
+ password = None
+ if step_widget and step_widget.sudo_input:
+ password = step_widget.sudo_input.value
+ if not step_model.can_sudo(password):
+ raise Exception('Sudo failed')
+
# merge the step_widget input data into our step model
if gui:
step_widget.clear_button()
diff --git a/conjureup/controllers/steps/gui.py b/conjureup/controllers/steps/gui.py
index cc23212..d583f0b 100644
--- a/conjureup/controllers/steps/gui.py
+++ b/conjureup/controllers/steps/gui.py
@@ -1,14 +1,10 @@
-import os
import os.path as path
from collections import OrderedDict, deque
from functools import partial
-import yaml
-
from conjureup import async, controllers
from conjureup.app_config import app
from conjureup.controllers.steps import common
-from conjureup.models.step import StepModel
from conjureup.telemetry import track_exception, track_screen
from conjureup.ui.views.steps import StepsView
from conjureup.ui.widgets.step import StepWidget
@@ -35,6 +31,7 @@ class StepsController:
def get_result(self, future):
if future.exception():
self.__handle_exception('E002', future.exception())
+ return
step_model, step_widget = future.result()
@@ -104,38 +101,20 @@ class StepsController:
step_widgets = deque()
self.n_completed_steps = 0
for step_meta_path in self.step_metas:
- step_ex_path, ext = path.splitext(step_meta_path)
- short_path = '/'.join(step_ex_path.split('/')[-3:])
- err_msg = None
- if not path.isfile(step_ex_path):
- err_msg = (
- 'Step {} has no implementation'.format(short_path))
- elif not os.access(step_ex_path, os.X_OK):
- err_msg = (
- 'Step {} is not executable, make sure it has '
- 'the executable bit set'.format(short_path))
- if err_msg:
- app.log.error(err_msg)
- self.__handle_exception('E002', Exception(err_msg))
- return
- step_metadata = {}
- with open(step_meta_path) as fp:
- step_metadata = yaml.load(fp.read())
-
try:
# Store step model and its widget
- model = StepModel(step_metadata, step_meta_path)
+ model = common.load_step(step_meta_path)
+ if not model.viewable:
+ app.log.debug("Skipping step: {}".format(model.title))
+ continue
step_widget = StepWidget(
app,
model,
self.finish)
- if not step_widget.model.viewable:
- app.log.debug("Skipping step: {}".format(step_widget))
- continue
- model.path = step_ex_path
step_widgets.append(step_widget)
app.log.debug("Queueing step: {}".format(step_widget))
except Exception as e:
+ app.log.exception(e)
self.__handle_exception('E002', e)
return
diff --git a/conjureup/controllers/steps/tui.py b/conjureup/controllers/steps/tui.py
index 18ce191..76fa355 100644
--- a/conjureup/controllers/steps/tui.py
+++ b/conjureup/controllers/steps/tui.py
@@ -1,14 +1,10 @@
-import os
import os.path as path
import sys
from collections import OrderedDict
-import yaml
-
from conjureup import controllers, utils
from conjureup.app_config import app
from conjureup.controllers.steps import common
-from conjureup.models.step import StepModel
class StepsController:
@@ -26,25 +22,16 @@ class StepsController:
def render(self):
for step_meta_path in self.step_metas:
- step_ex_path, ext = path.splitext(step_meta_path)
- short_path = '/'.join(step_ex_path.split('/')[-3:])
- err_msg = None
- if not path.isfile(step_ex_path):
- err_msg = (
- 'Step {} has no implementation'.format(short_path))
- elif not os.access(step_ex_path, os.X_OK):
- err_msg = (
- 'Step {} is not executable, make sure it has '
- 'the executable bit set'.format(short_path))
- if err_msg:
- app.log.error(err_msg)
- utils.error(err_msg)
+ try:
+ model = common.load_step(step_meta_path)
+ except common.ValidationError as e:
+ app.log.error(e.msg)
+ utils.error(e.msg)
+ sys.exit(1)
+ if model.needs_sudo and not model.can_sudo():
+ utils.error("Step requires passwordless sudo: {}".format(
+ model.title))
sys.exit(1)
- step_metadata = {}
- with open(step_meta_path) as fp:
- step_metadata = yaml.load(fp.read())
- model = StepModel(step_metadata, step_meta_path)
- model.path = step_ex_path
app.log.debug("Running step: {}".format(model))
try:
step_model, _ = common.do_step(model,
diff --git a/conjureup/juju.py b/conjureup/juju.py
index 6f16517..a1f129d 100644
--- a/conjureup/juju.py
+++ b/conjureup/juju.py
@@ -756,12 +756,16 @@ def get_model(controller, name):
"Unable to find model: {}".format(name))
-def add_model(name, controller, cloud):
+def add_model(name, controller, cloud, allow_exists=False):
""" Adds a model to current controller
Arguments:
controller: controller to add model in
+ allow_exists: re-use an existing model, if one exists.
"""
+ if allow_exists and model_available():
+ return
+
sh = run('juju add-model {} -c {} {}'.format(name, controller, cloud),
shell=True, stdout=DEVNULL, stderr=PIPE)
if sh.returncode > 0:
diff --git a/conjureup/models/step.py b/conjureup/models/step.py
index a221100..1e20a27 100644
--- a/conjureup/models/step.py
+++ b/conjureup/models/step.py
@@ -1,5 +1,7 @@
""" Step model
"""
+import subprocess
+
from conjureup.app_config import app
@@ -10,8 +12,9 @@ class StepModel:
self.description = step.get('description', '')
self.result = ''
self.viewable = step.get('viewable', False)
-
+ self.needs_sudo = step.get('sudo', False)
self.additional_input = step.get('additional-input', [])
+ self.path = path
def __getattr__(self, attr):
"""
@@ -34,3 +37,16 @@ class StepModel:
self.description,
self.viewable,
self.path)
+
+ def can_sudo(self, password=None):
+ if not password:
+ result = subprocess.run(['sudo', '-nv'],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ else:
+ password = '{}\n'.format(password).encode('utf8')
+ result = subprocess.run(['sudo', '-Sv'],
+ input=password,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL)
+ return result.returncode == 0
diff --git a/conjureup/ui/widgets/step.py b/conjureup/ui/widgets/step.py
index e9c2568..c173152 100644
--- a/conjureup/ui/widgets/step.py
+++ b/conjureup/ui/widgets/step.py
@@ -37,6 +37,7 @@ class StepWidget(WidgetWrap):
self.result = Text(step_model.result)
self.output = Text(('info_minor', ''))
self.icon = Text(("info_minor", "\N{BALLOT BOX}"))
+ self.sudo_input = None
self.additional_input = []
if len(step_model.additional_input) > 0:
@@ -156,6 +157,23 @@ class StepWidget(WidgetWrap):
'pending_icon',
self.icon.get_text()[0]
))
+ if self.model.needs_sudo:
+ self.step_pile.contents.append((Padding.line_break(""),
+ self.step_pile.options()))
+ can_sudo = self.model.can_sudo()
+ label = 'This step requires sudo.'
+ if not can_sudo:
+ label += ' Please enter sudo password:'
+ columns = [
+ ('weight', 0.5, Padding.left(Text(('body', label)), left=5)),
+ ]
+ if not can_sudo:
+ self.sudo_input = PasswordEditor()
+ columns.append(('weight', 1, Color.string_input(
+ self.sudo_input, focus_map='string_input focus')))
+ self.step_pile.contents.append((Columns(columns, dividechars=3),
+ self.step_pile.options()))
+
for i in self.additional_input:
self.app.log.debug(i)
self.step_pile.contents.append((Padding.line_break(""),
| Add sudo support for spells
Some spell steps will require sudo. They need a way to indicate that need, the GUI needs to detect if prompting for the password is necessary, and the TUI needs to document that passwordless sudo is required. | conjure-up/conjure-up | diff --git a/test/test_controllers_clouds_tui.py b/test/test_controllers_clouds_tui.py
index e6e7386..d5193b2 100644
--- a/test/test_controllers_clouds_tui.py
+++ b/test/test_controllers_clouds_tui.py
@@ -96,7 +96,8 @@ class CloudsTUIFinishTestCase(unittest.TestCase):
self.mock_app.current_cloud = 'cloud'
self.controller.finish()
self.mock_juju.assert_has_calls([
- call.add_model(ANY, 'testcontroller', 'cloud')])
+ call.add_model(ANY, 'testcontroller', 'cloud',
+ allow_exists=True)])
def test_finish_no_controller(self):
"clouds.finish without existing controller"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 8
} | 2.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"tox",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
-e git+https://github.com/conjure-up/conjure-up.git@9e24a368a5a50bd8493484373829d5b8f3d2fb8e#egg=conjure_up
distlib==0.3.9
filelock==3.4.1
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==3.0.3
juju-wait==2.8.4
MarkupSafe==2.0.1
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
prettytable==2.5.0
progressbar2==3.55.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-utils==3.5.2
PyYAML==6.0.1
q==2.7
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
urwid==2.1.2
virtualenv==20.17.1
wcwidth==0.2.13
ws4py==0.3.4
zipp==3.6.0
| name: conjure-up
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- distlib==0.3.9
- filelock==3.4.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==3.0.3
- juju-wait==2.8.4
- markupsafe==2.0.1
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- prettytable==2.5.0
- progressbar2==3.55.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-utils==3.5.2
- pyyaml==6.0.1
- q==2.7
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- urwid==2.1.2
- virtualenv==20.17.1
- wcwidth==0.2.13
- ws4py==0.3.4
- zipp==3.6.0
prefix: /opt/conda/envs/conjure-up
| [
"test/test_controllers_clouds_tui.py::CloudsTUIFinishTestCase::test_finish_w_controller"
]
| []
| [
"test/test_controllers_clouds_tui.py::CloudsTUIRenderTestCase::test_render",
"test/test_controllers_clouds_tui.py::CloudsTUIRenderTestCase::test_render_unknown",
"test/test_controllers_clouds_tui.py::CloudsTUIFinishTestCase::test_finish_no_controller"
]
| []
| MIT License | 1,115 | [
"conjureup/juju.py",
"conjureup/controllers/clouds/tui.py",
"conjureup/controllers/steps/gui.py",
"conjureup/controllers/steps/common.py",
"conjureup/controllers/steps/tui.py",
"conjureup/ui/widgets/step.py",
"README.md",
"conjureup/models/step.py"
]
| [
"conjureup/juju.py",
"conjureup/controllers/clouds/tui.py",
"conjureup/controllers/steps/gui.py",
"conjureup/controllers/steps/common.py",
"conjureup/controllers/steps/tui.py",
"conjureup/ui/widgets/step.py",
"README.md",
"conjureup/models/step.py"
]
|
|
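For the conjure-up record above, the heart of the new sudo support is `StepModel.can_sudo`, which leans on `sudo`'s own flags: `-v` only validates (refreshes) cached credentials, `-n` forbids prompting so the call fails when a password would be needed, and `-S` reads the password from stdin for the GUI case. A minimal standalone sketch of the passwordless probe (assumes a system with `sudo` installed; it is not the conjure-up class itself):

```python
import subprocess

def can_sudo_without_password():
    # `sudo -nv` exits non-zero whenever sudo would have to prompt for a password.
    result = subprocess.run(['sudo', '-nv'],
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.DEVNULL)
    return result.returncode == 0

print('passwordless sudo available:', can_sudo_without_password())
```

In headless (TUI) mode the patch simply refuses to run a sudo step when this probe fails, which is why the README addition recommends passwordless sudo or pre-authorising sudo before invoking conjure-up.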
Azure__azure-cli-2624 | 29d8a30bfb42784c61c0c28e7515937272d276bc | 2017-03-24 20:14:29 | 350bb060e1fddf437f12788afbeedaa0cf61a87f | diff --git a/azure-cli.pyproj b/azure-cli.pyproj
index 40fc0e2dc..1b3a00234 100644
--- a/azure-cli.pyproj
+++ b/azure-cli.pyproj
@@ -587,6 +587,8 @@
<Folder Include="command_modules\azure-cli-network\azure\cli\command_modules\network\mgmt_app_gateway\lib\" />
<Folder Include="command_modules\azure-cli-network\azure\cli\command_modules\network\mgmt_app_gateway\lib\models\" />
<Folder Include="command_modules\azure-cli-network\azure\cli\command_modules\network\mgmt_app_gateway\lib\operations\" />
+ <Folder Include="command_modules\azure-cli-network\azure\cli\command_modules\network\tests\" />
+ <Folder Include="command_modules\azure-cli-network\azure\cli\command_modules\network\tests\zone_files\" />
<Folder Include="command_modules\azure-cli-network\azure\cli\command_modules\network\zone_file\" />
<Folder Include="command_modules\azure-cli-network\tests\" />
<Folder Include="command_modules\azure-cli-network\tests\zone_files\" />
@@ -596,6 +598,7 @@
<Folder Include="command_modules\azure-cli-redis\azure\cli\command_modules\" />
<Folder Include="command_modules\azure-cli-redis\azure\cli\command_modules\redis\" />
<Folder Include="command_modules\azure-cli-redis\tests\" />
+ <Folder Include="command_modules\azure-cli-resource\azure\cli\command_modules\resource\tests\" />
<Folder Include="command_modules\azure-cli-resource\tests\" />
<Folder Include="command_modules\azure-cli-role\tests\" />
<Folder Include="command_modules\azure-cli-redis\azure\cli\command_modules\redis\tests\" />
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py
index fb0d3ea18..c76f2c1ff 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py
@@ -208,7 +208,7 @@ for scope in ['vm create', 'vmss create']:
register_cli_argument(scope, 'use_unmanaged_disk', action='store_true', help='Do not use managed disk to persist VM', arg_group='Storage')
register_cli_argument(scope, 'data_disk_sizes_gb', nargs='+', type=int, help='space separated empty managed data disk sizes in GB to create', arg_group='Storage')
register_cli_argument(scope, 'image_data_disks', ignore_type)
- for item in ['storage_account', 'public_ip', 'nsg', 'nic', 'vnet', 'load_balancer']:
+ for item in ['storage_account', 'public_ip', 'nsg', 'nic', 'vnet', 'load_balancer', 'app_gateway']:
register_cli_argument(scope, '{}_type'.format(item), ignore_type)
register_cli_argument(scope, 'vnet_name', help='Name of the virtual network when creating a new one or referencing an existing one.', arg_group='Network')
@@ -231,10 +231,12 @@ register_cli_argument('vm create', 'attach_os_disk', help='Attach an existing OS
register_cli_argument('vm create', 'availability_set', help='Name or ID of an existing availability set to add the VM to. None by default.')
register_cli_argument('vmss create', 'vmss_name', name_arg_type, id_part=None, help='Name of the virtual machine scale set.', validator=process_vmss_create_namespace)
-register_cli_argument('vmss create', 'load_balancer', help='Name to use when creating a new load balancer (default) or referencing an existing one. Can also reference an existing load balancer by ID or specify "" for none.', arg_group='Load Balancer')
-register_cli_argument('vmss create', 'backend_pool_name', help='Name to use for the backend pool when creating a new load balancer.', arg_group='Load Balancer')
-register_cli_argument('vmss create', 'nat_pool_name', help='Name to use for the NAT pool when creating a new load balancer.', arg_group='Load Balancer')
-register_cli_argument('vmss create', 'backend_port', help='Backend port to open with NAT rules. Defaults to 22 on Linux and 3389 on Windows.', type=int, arg_group='Load Balancer')
+register_cli_argument('vmss create', 'load_balancer', help='Name to use when creating a new load balancer (default) or referencing an existing one. Can also reference an existing load balancer by ID or specify "" for none.', options_list=['--load-balancer', '--lb'], arg_group='Network Balancer')
+register_cli_argument('vmss create', 'application_gateway', help='Name to use when creating a new application gateway (default) or referencing an existing one. Can also reference an existing application gateway by ID or specify "" for none.', options_list=['--app-gateway'], arg_group='Network Balancer')
+register_cli_argument('vmss create', 'backend_pool_name', help='Name to use for the backend pool when creating a new load balancer or application gateway.', arg_group='Network Balancer')
+register_cli_argument('vmss create', 'nat_pool_name', help='Name to use for the NAT pool when creating a new load balancer.', options_list=['--lb-nat-pool-name', '--nat-pool-name'], arg_group='Network Balancer')
+register_cli_argument('vmss create', 'backend_port', help='When creating a new load balancer, backend port to open with NAT rules (Defaults to 22 on Linux and 3389 on Windows). When creating an application gateway, the backend port to use for the backend HTTP settings.', type=int, arg_group='Network Balancer')
+register_cli_argument('vmss create', 'app_gateway_subnet_address_prefix', help='The subnet IP address prefix to use when creating a new application gateway in CIDR format.', arg_group='Network Balancer')
register_cli_argument('vmss create', 'instance_count', help='Number of VMs in the scale set.', type=int)
register_cli_argument('vmss create', 'disable_overprovision', help='Overprovision option (see https://azure.microsoft.com/en-us/documentation/articles/virtual-machine-scale-sets-overview/ for details).', action='store_true')
register_cli_argument('vmss create', 'upgrade_policy_mode', help=None, **enum_choice_list(UpgradeMode))
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_template_builder.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_template_builder.py
index d0440e807..5ec47d161 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_template_builder.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_template_builder.py
@@ -411,13 +411,10 @@ def _build_data_disks(profile, data_disk_sizes_gb, image_data_disks,
return profile
-def build_load_balancer_resource(name, location, tags, backend_pool_name, nat_pool_name,
- backend_port, frontend_ip_name, public_ip_id, subnet_id,
- private_ip_address='', private_ip_allocation='dynamic'):
- lb_id = "resourceId('Microsoft.Network/loadBalancers', '{}')".format(name)
-
+def _build_frontend_ip_config(name, public_ip_id=None, private_ip_allocation=None,
+ private_ip_address=None, subnet_id=None):
frontend_ip_config = {
- 'name': frontend_ip_name
+ 'name': name
}
if public_ip_id:
@@ -438,6 +435,109 @@ def build_load_balancer_resource(name, location, tags, backend_pool_name, nat_po
}
}
})
+ return frontend_ip_config
+
+
+def build_application_gateway_resource(name, location, tags, backend_pool_name, backend_port,
+ frontend_ip_name, public_ip_id,
+ subnet_id, gateway_subnet_id,
+ private_ip_address, private_ip_allocation):
+ frontend_ip_config = _build_frontend_ip_config(frontend_ip_name, public_ip_id,
+ private_ip_address, private_ip_allocation,
+ subnet_id)
+
+ def _ag_subresource_id(_type, name):
+ return "[concat(variables('appGwID'), '/{}/{}')]".format(_type, name)
+
+ frontend_ip_config_id = _ag_subresource_id('frontendIPConfigurations', frontend_ip_name)
+ frontend_port_id = _ag_subresource_id('frontendPorts', 'appGwFrontendPort')
+ http_listener_id = _ag_subresource_id('httpListeners', 'appGwHttpListener')
+ backend_address_pool_id = _ag_subresource_id('backendAddressPools', backend_pool_name)
+ backend_http_settings_id = _ag_subresource_id(
+ 'backendHttpSettingsCollection', 'appGwBackendHttpSettings')
+
+ ag_properties = {
+ 'backendAddressPools': [
+ {
+ 'name': backend_pool_name
+ }
+ ],
+ 'backendHttpSettingsCollection': [
+ {
+ 'name': 'appGwBackendHttpSettings',
+ 'properties': {
+ 'Port': backend_port,
+ 'Protocol': 'Http',
+ 'CookieBasedAffinity': 'Disabled'
+ }
+ }
+ ],
+ 'frontendIPConfigurations': [frontend_ip_config],
+ 'frontendPorts': [
+ {
+ 'name': 'appGwFrontendPort',
+ 'properties': {
+ 'Port': 80
+ }
+ }
+ ],
+ 'gatewayIPConfigurations': [
+ {
+ 'name': 'appGwIpConfig',
+ 'properties': {
+ 'subnet': {'id': gateway_subnet_id}
+ }
+ }
+ ],
+ 'httpListeners': [
+ {
+ 'name': 'appGwHttpListener',
+ 'properties': {
+ 'FrontendIPConfiguration': {'Id': frontend_ip_config_id},
+ 'FrontendPort': {'Id': frontend_port_id},
+ 'Protocol': 'Http',
+ 'SslCertificate': None
+ }
+ }
+ ],
+ 'sku': {
+ 'name': 'Standard_Large',
+ 'tier': 'Standard',
+ 'capacity': '10'
+ },
+ 'requestRoutingRules': [
+ {
+ 'Name': 'rule1',
+ 'properties': {
+ 'RuleType': 'Basic',
+ 'httpListener': {'id': http_listener_id},
+ 'backendAddressPool': {'id': backend_address_pool_id},
+ 'backendHttpSettings': {'id': backend_http_settings_id}
+ }
+ }
+ ]
+ }
+
+ ag = {
+ 'type': 'Microsoft.Network/applicationGateways',
+ 'name': name,
+ 'location': location,
+ 'tags': tags,
+ 'apiVersion': '2015-06-15',
+ 'dependsOn': [],
+ 'properties': ag_properties
+ }
+ return ag
+
+
+def build_load_balancer_resource(name, location, tags, backend_pool_name, nat_pool_name,
+ backend_port, frontend_ip_name, public_ip_id, subnet_id,
+ private_ip_address, private_ip_allocation):
+ lb_id = "resourceId('Microsoft.Network/loadBalancers', '{}')".format(name)
+
+ frontend_ip_config = _build_frontend_ip_config(frontend_ip_name, public_ip_id,
+ private_ip_address, private_ip_allocation,
+ subnet_id)
lb_properties = {
'backendAddressPools': [
@@ -464,13 +564,13 @@ def build_load_balancer_resource(name, location, tags, backend_pool_name, nat_po
}
lb = {
- "type": "Microsoft.Network/loadBalancers",
- "name": name,
- "location": location,
- "tags": tags,
- "apiVersion": "2015-06-15",
- "dependsOn": [],
- "properties": lb_properties
+ 'type': 'Microsoft.Network/loadBalancers',
+ 'name': name,
+ 'location': location,
+ 'tags': tags,
+ 'apiVersion': '2015-06-15',
+ 'dependsOn': [],
+ 'properties': lb_properties
}
return lb
@@ -513,8 +613,11 @@ def build_vmss_resource(name, naming_prefix, location, tags, overprovision, upgr
'subnet': {'id': subnet_id}
}
}
+
if backend_address_pool_id:
- ip_configuration['properties']['loadBalancerBackendAddressPools'] = [
+ key = 'loadBalancerBackendAddressPools' if 'loadBalancers' in backend_address_pool_id \
+ else 'ApplicationGatewayBackendAddressPools'
+ ip_configuration['properties'][key] = [
{'id': backend_address_pool_id}
]
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py
index 2bc5ab655..08009d150 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py
@@ -199,7 +199,15 @@ storage_profile_param_options = {
}
-def _validate_required_forbidden_parameters(namespace, required, forbidden):
+network_balancer_param_options = {
+ 'application_gateway': '--application-gateway',
+ 'load_balancer': '--load-balancer',
+ 'app_gateway_subnet_address_prefix': '--app-gateway-subnet-address-prefix',
+ 'backend_pool_name': '--backend-pool-name'
+}
+
+
+def _validate_storage_required_forbidden_parameters(namespace, required, forbidden):
missing_required = [x for x in required if not getattr(namespace, x)]
included_forbidden = [x for x in forbidden if getattr(namespace, x)]
if missing_required or included_forbidden:
@@ -216,6 +224,22 @@ def _validate_required_forbidden_parameters(namespace, required, forbidden):
raise CLIError(error)
+def _validate_network_balancer_required_forbidden_parameters(namespace, required, forbidden, desc):
+ missing_required = [x for x in required if not getattr(namespace, x)]
+ included_forbidden = [x for x in forbidden if getattr(namespace, x)]
+ if missing_required or included_forbidden:
+ error = 'invalid usage for network balancer: {}'.format(desc)
+ if missing_required:
+ missing_string = ', '.join(
+ network_balancer_param_options[x] for x in missing_required)
+ error = '{}\n\tmissing: {}'.format(error, missing_string)
+ if included_forbidden:
+ forbidden_string = ', '.join(
+ network_balancer_param_options[x] for x in included_forbidden)
+ error = '{}\n\tnot applicable: {}'.format(error, forbidden_string)
+ raise CLIError(error)
+
+
def _validate_managed_disk_sku(sku):
allowed_skus = ['Premium_LRS', 'Standard_LRS']
@@ -309,7 +333,7 @@ def _validate_vm_create_storage_profile(namespace, for_scale_set=False):
namespace.storage_sku = 'Standard_LRS' if for_scale_set else 'Premium_LRS'
# Now verify that the status of required and forbidden parameters
- _validate_required_forbidden_parameters(namespace, required, forbidden)
+ _validate_storage_required_forbidden_parameters(namespace, required, forbidden)
if namespace.storage_profile == StorageProfile.ManagedCustomImage:
# extract additional information from a managed custom image
@@ -452,6 +476,10 @@ def _validate_vmss_create_subnet(namespace):
raise CLIError(err.format(namespace.instance_count))
namespace.subnet_address_prefix = '{}/{}'.format(cidr, i)
+ if namespace.app_gateway_type and namespace.app_gateway_subnet_address_prefix is None:
+ raise CLIError('Must specify --gateway-subnet-address-prefix to create an '
+ 'application gateway.')
+
def _validate_vm_create_nsg(namespace):
@@ -481,10 +509,10 @@ def _validate_vm_create_public_ip(namespace):
def _validate_vmss_create_public_ip(namespace):
- if namespace.load_balancer_type is None:
+ if namespace.load_balancer_type is None and namespace.app_gateway_type is None:
if namespace.public_ip_address:
- raise CLIError('--public-ip-address is not applicable when there is no load-balancer '
- 'attached, or implictly disabled due to 100+ instance count')
+ raise CLIError('--public-ip-address can only be used when creating a new load '
+ 'balancer or application gateway frontend.')
namespace.public_ip_address = ''
_validate_vm_create_public_ip(namespace)
@@ -696,36 +724,90 @@ def process_vm_create_namespace(namespace):
# region VMSS Create Validators
+def _get_vmss_create_instance_threshold():
+ return 100
+
+
+def _validate_vmss_create_load_balancer_or_app_gateway(namespace):
+
+ INSTANCE_THRESHOLD = _get_vmss_create_instance_threshold()
-def _validate_vmss_create_load_balancer(namespace):
# convert the single_placement_group to boolean for simpler logic beyond
if namespace.single_placement_group is None:
- namespace.single_placement_group = namespace.instance_count <= 100
+ namespace.single_placement_group = namespace.instance_count <= INSTANCE_THRESHOLD
else:
namespace.single_placement_group = (namespace.single_placement_group == 'true')
- if not namespace.single_placement_group:
+ if not namespace.single_placement_group and namespace.load_balancer:
+ raise CLIError(
+ '--load-balancer is not applicable when --single-placement-group is turned off.')
+
+ if namespace.load_balancer and namespace.application_gateway:
+ raise CLIError('incorrect usage: --load-balancer NAME_OR_ID | '
+ '--application-gateway NAME_OR_ID')
+
+ if namespace.instance_count > INSTANCE_THRESHOLD and namespace.load_balancer:
+ raise CLIError(
+ '--load-balancer cannot be used with --instance_count is > {}'.format(
+ INSTANCE_THRESHOLD))
+
+ # Resolve the type of balancer (if any) being used
+ balancer_type = 'None'
+ if namespace.load_balancer is None and namespace.application_gateway is None:
+ # use defaulting rules to determine
+ balancer_type = 'loadBalancer' if namespace.instance_count <= INSTANCE_THRESHOLD \
+ else 'applicationGateway'
+ elif namespace.load_balancer:
+ balancer_type = 'loadBalancer'
+ elif namespace.application_gateway:
+ balancer_type = 'applicationGateway'
+
+ if balancer_type == 'applicationGateway':
+
+ if namespace.application_gateway:
+ if check_existence(namespace.application_gateway, namespace.resource_group_name,
+ 'Microsoft.Network', 'applicationGateways'):
+ namespace.app_gateway_type = 'existing'
+ else:
+ namespace.app_gateway_type = 'new'
+ elif namespace.application_gateway == '':
+ namespace.app_gateway_type = None
+ elif namespace.application_gateway is None:
+ namespace.app_gateway_type = 'new'
+
+ # AppGateway frontend
+ required = []
+ if namespace.app_gateway_type == 'new':
+ required.append('app_gateway_subnet_address_prefix')
+ elif namespace.app_gateway_type == 'existing':
+ required.append('backend_pool_name')
+ forbidden = ['nat_pool_name', 'load_balancer']
+ _validate_network_balancer_required_forbidden_parameters(
+ namespace, required, forbidden, 'application gateway')
+
+ elif balancer_type == 'loadBalancer':
+ # LoadBalancer frontend
+ required = []
+ forbidden = ['app_gateway_subnet_address_prefix', 'application_gateway']
+ _validate_network_balancer_required_forbidden_parameters(
+ namespace, required, forbidden, 'load balancer')
+
if namespace.load_balancer:
- raise CLIError('--load-balancer is not applicable when --single-placement-group is '
- 'explictly turned off or implictly turned off for 100+ instance count')
- namespace.load_balancer = ''
-
- if namespace.load_balancer:
- if check_existence(namespace.load_balancer, namespace.resource_group_name,
- 'Microsoft.Network', 'loadBalancers'):
- namespace.load_balancer_type = 'existing'
- else:
+ if check_existence(namespace.load_balancer, namespace.resource_group_name,
+ 'Microsoft.Network', 'loadBalancers'):
+ namespace.load_balancer_type = 'existing'
+ else:
+ namespace.load_balancer_type = 'new'
+ elif namespace.load_balancer == '':
+ namespace.load_balancer_type = None
+ elif namespace.load_balancer is None:
namespace.load_balancer_type = 'new'
- elif namespace.load_balancer == '':
- namespace.load_balancer_type = None
- elif namespace.load_balancer is None:
- namespace.load_balancer_type = 'new'
def process_vmss_create_namespace(namespace):
get_default_location_from_resource_group(namespace)
_validate_vm_create_storage_profile(namespace, for_scale_set=True)
- _validate_vmss_create_load_balancer(namespace)
+ _validate_vmss_create_load_balancer_or_app_gateway(namespace)
_validate_vm_create_vnet(namespace, for_scale_set=True)
_validate_vmss_create_subnet(namespace)
_validate_vmss_create_public_ip(namespace)
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
index 04e1d09f2..2c1df1830 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
@@ -725,7 +725,7 @@ def _get_extension_instance_name(instance_view, publisher, extension_type_name,
full_type_name = '.'.join([publisher, extension_type_name])
if instance_view.extensions:
ext = next((x for x in instance_view.extensions
- if x.type.lower() == full_type_name.lower()))
+ if x.type.lower() == full_type_name.lower()), None)
if ext:
extension_instance_name = ext.name
return extension_instance_name
@@ -1442,7 +1442,7 @@ def list_vmss_instance_connection_info(resource_group_name, vm_scale_set_name):
vm_scale_set_name)
# find the load balancer
nic_configs = vmss.virtual_machine_profile.network_profile.network_interface_configurations
- primary_nic_config = next(n for n in nic_configs if n.primary)
+ primary_nic_config = next((n for n in nic_configs if n.primary), None)
if primary_nic_config is None:
raise CLIError('could not find a primary nic which is needed to search to load balancer')
ip_configs = primary_nic_config.ip_configurations
@@ -1659,7 +1659,9 @@ def create_vmss(vmss_name, resource_group_name, image,
admin_username=getpass.getuser(), admin_password=None, authentication_type=None,
vm_sku="Standard_D1_v2", no_wait=False,
ssh_dest_key_path=None, ssh_key_value=None, generate_ssh_keys=False,
- load_balancer=None, backend_pool_name=None, nat_pool_name=None, backend_port=None,
+ load_balancer=None, application_gateway=None,
+ app_gateway_subnet_address_prefix=None,
+ backend_pool_name=None, nat_pool_name=None, backend_port=None,
public_ip_address=None, public_ip_address_allocation='dynamic',
public_ip_address_dns_name=None,
os_caching=None, data_caching=None,
@@ -1669,7 +1671,8 @@ def create_vmss(vmss_name, resource_group_name, image,
vnet_name=None, vnet_address_prefix='10.0.0.0/16',
subnet=None, subnet_address_prefix=None,
os_offer=None, os_publisher=None, os_sku=None, os_version=None,
- load_balancer_type=None, vnet_type=None, public_ip_type=None, storage_profile=None,
+ load_balancer_type=None, app_gateway_type=None, vnet_type=None,
+ public_ip_type=None, storage_profile=None,
single_placement_group=None, custom_data=None, secrets=None):
from azure.cli.core.commands.client_factory import get_subscription_id
from azure.cli.core._util import random_string
@@ -1677,7 +1680,7 @@ def create_vmss(vmss_name, resource_group_name, image,
ArmTemplateBuilder, StorageProfile, build_vmss_resource, build_storage_account_resource,
build_vnet_resource, build_public_ip_resource, build_load_balancer_resource,
build_output_deployment_resource, build_deployment_resource,
- build_vmss_storage_account_pool_resource)
+ build_vmss_storage_account_pool_resource, build_application_gateway_resource)
from azure.cli.core._profile import CLOUD
from azure.mgmt.resource.resources import ResourceManagementClient
@@ -1688,6 +1691,10 @@ def create_vmss(vmss_name, resource_group_name, image,
subscription=get_subscription_id(), resource_group=resource_group_name,
namespace='Microsoft.Network')
+ scrubbed_name = vmss_name.replace('-', '').lower()[:5]
+ naming_prefix = '{}{}'.format(scrubbed_name,
+ random_string(9 - len(scrubbed_name), force_lower=True))
+
# determine final defaults and calculated values
tags = tags or {}
os_disk_name = os_disk_name or 'osdisk_{}'.format(random_string(10))
@@ -1698,16 +1705,33 @@ def create_vmss(vmss_name, resource_group_name, image,
master_template = ArmTemplateBuilder()
vmss_dependencies = []
+
+ # VNET will always be a dependency
if vnet_type == 'new':
vnet_name = vnet_name or '{}VNET'.format(vmss_name)
subnet = subnet or '{}Subnet'.format(vmss_name)
vmss_dependencies.append('Microsoft.Network/virtualNetworks/{}'.format(vnet_name))
- master_template.add_resource(build_vnet_resource(
- vnet_name, location, tags, vnet_address_prefix, subnet, subnet_address_prefix))
+ vnet = build_vnet_resource(
+ vnet_name, location, tags, vnet_address_prefix, subnet, subnet_address_prefix)
+ if app_gateway_type:
+ vnet['properties']['subnets'].append({
+ 'name': 'appGwSubnet',
+ 'properties': {
+ 'addressPrefix': app_gateway_subnet_address_prefix
+ }
+ })
+ master_template.add_resource(vnet)
subnet_id = subnet if is_valid_resource_id(subnet) else \
'{}/virtualNetworks/{}/subnets/{}'.format(network_id_template, vnet_name, subnet)
+ gateway_subnet_id = \
+ '{}/virtualNetworks/{}/subnets/appGwSubnet'.format(network_id_template, vnet_name) \
+ if app_gateway_type == 'new' else None
+
+ # public IP is used by either load balancer/application gateway
+ public_ip_address_id = public_ip_address if is_valid_resource_id(public_ip_address) else None
+ # Handle load balancer creation
if load_balancer_type == 'new':
load_balancer = load_balancer or '{}LB'.format(vmss_name)
vmss_dependencies.append('Microsoft.Network/loadBalancers/{}'.format(load_balancer))
@@ -1720,10 +1744,8 @@ def create_vmss(vmss_name, resource_group_name, image,
tags,
public_ip_address_allocation,
public_ip_address_dns_name))
- public_ip_address_id = None
- if public_ip_address:
- public_ip_address_id = public_ip_address if is_valid_resource_id(public_ip_address) \
- else '{}/publicIPAddresses/{}'.format(network_id_template, public_ip_address)
+ public_ip_address_id = '{}/publicIPAddresses/{}'.format(network_id_template,
+ public_ip_address)
# calculate default names if not provided
backend_pool_name = backend_pool_name or '{}BEPool'.format(load_balancer)
@@ -1733,14 +1755,43 @@ def create_vmss(vmss_name, resource_group_name, image,
lb_resource = build_load_balancer_resource(
load_balancer, location, tags, backend_pool_name, nat_pool_name, backend_port,
- 'loadBalancerFrontEnd', public_ip_address_id, subnet_id)
+ 'loadBalancerFrontEnd', public_ip_address_id, subnet_id,
+ private_ip_address='', private_ip_allocation='Dynamic')
lb_resource['dependsOn'] = lb_dependencies
master_template.add_resource(lb_resource)
- scrubbed_name = vmss_name.replace('-', '').lower()[:5]
- naming_prefix = '{}{}'.format(scrubbed_name,
- random_string(9 - len(scrubbed_name), force_lower=True))
+ # Or handle application gateway creation
+ app_gateway = application_gateway
+ if app_gateway_type == 'new':
+ app_gateway = application_gateway or '{}AG'.format(vmss_name)
+ vmss_dependencies.append('Microsoft.Network/applicationGateways/{}'.format(app_gateway))
+
+ ag_dependencies = []
+ if public_ip_type == 'new':
+ public_ip_address = public_ip_address or '{}PublicIP'.format(app_gateway)
+ ag_dependencies.append('Microsoft.Network/publicIpAddresses/{}'.format(public_ip_address)) # pylint: disable=line-too-long
+ master_template.add_resource(build_public_ip_resource(public_ip_address, location,
+ tags,
+ public_ip_address_allocation,
+ public_ip_address_dns_name))
+ public_ip_address_id = '{}/publicIPAddresses/{}'.format(network_id_template,
+ public_ip_address)
+ # calculate default names if not provided
+ backend_pool_name = backend_pool_name or '{}BEPool'.format(app_gateway)
+ backend_port = backend_port or 80
+
+ ag_resource = build_application_gateway_resource(
+ app_gateway, location, tags, backend_pool_name, backend_port, 'appGwFrontendIP',
+ public_ip_address_id, subnet_id, gateway_subnet_id, private_ip_address='',
+ private_ip_allocation='Dynamic')
+ ag_resource['dependsOn'] = ag_dependencies
+ master_template.add_variable(
+ 'appGwID',
+ "[resourceId('Microsoft.Network/applicationGateways', '{}')]".format(app_gateway))
+ master_template.add_resource(ag_resource)
+
+ # create storage accounts if needed for unmanaged disk storage
if storage_profile in [StorageProfile.SACustomImage, StorageProfile.SAPirImage]:
master_template.add_resource(build_vmss_storage_account_pool_resource(
'storageLoop', location, tags, storage_sku))
@@ -1755,18 +1806,27 @@ def create_vmss(vmss_name, resource_group_name, image,
backend_address_pool_id = None
inbound_nat_pool_id = None
- if is_valid_resource_id(load_balancer):
- backend_address_pool_id = \
- '{}/backendAddressPools/{}'.format(load_balancer, backend_pool_name) \
- if load_balancer_type else None
- inbound_nat_pool_id = '{}/inboundNatPools/{}'.format(load_balancer, nat_pool_name) \
- if load_balancer_type == 'new' else None
- else:
- backend_address_pool_id = '{}/loadBalancers/{}/backendAddressPools/{}'.format(
- network_id_template, load_balancer, backend_pool_name) if load_balancer_type else None
- inbound_nat_pool_id = '{}/loadBalancers/{}/inboundNatPools/{}'.format(
- network_id_template, load_balancer, nat_pool_name) if load_balancer_type == 'new' \
- else None
+ if load_balancer_type or app_gateway_type:
+ network_balancer = load_balancer or app_gateway
+ balancer_type = 'loadBalancers' if load_balancer_type else 'applicationGateways'
+
+ if is_valid_resource_id(network_balancer):
+ # backend address pool needed by load balancer or app gateway
+ backend_address_pool_id = \
+ '{}/backendAddressPools/{}'.format(network_balancer, backend_pool_name)
+
+ # nat pool only applies to new load balancers
+ inbound_nat_pool_id = '{}/inboundNatPools/{}'.format(load_balancer, nat_pool_name) \
+ if load_balancer_type == 'new' else None
+ else:
+ # backend address pool needed by load balancer or app gateway
+ backend_address_pool_id = '{}/{}/{}/backendAddressPools/{}'.format(
+ network_id_template, balancer_type, network_balancer, backend_pool_name)
+
+ # nat pool only applies to new load balancers
+ inbound_nat_pool_id = '{}/loadBalancers/{}/inboundNatPools/{}'.format(
+ network_id_template, load_balancer, nat_pool_name) if load_balancer_type == 'new' \
+ else None
ip_config_name = '{}IPConfig'.format(naming_prefix)
nic_name = '{}Nic'.format(naming_prefix)
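The custom.py hunks above replace bare `next(generator)` calls with `next(generator, None)`, which is what stops the `StopIteration` crash reported below. A minimal sketch of that pattern, using hypothetical stand-in objects rather than azure-cli's real instance-view types:

```python
# Sketch of the next()-with-default pattern adopted in the custom.py hunks above.
# Extension and the sample values are hypothetical stand-ins, not azure-cli types.
class Extension:
    def __init__(self, name, type_):
        self.name = name
        self.type = type_


def get_extension_instance_name(extensions, publisher, type_name):
    full_type_name = '.'.join([publisher, type_name]).lower()
    # The second argument makes next() return None instead of raising
    # StopIteration when nothing in the generator matches.
    ext = next((x for x in extensions if x.type.lower() == full_type_name), None)
    return ext.name if ext else type_name


exts = [Extension('antimalware', 'Microsoft.Azure.Security.IaaSAntimalware')]
print(get_extension_instance_name(exts, 'Microsoft.Azure.Security', 'IaaSAntimalware'))  # antimalware
print(get_extension_instance_name(exts, 'Other.Publisher', 'Missing'))                   # Missing
```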
| `az vm extension set` crashes
```
az vm extension set -n IaaSAntimalware --publisher Microsoft.Azure.Security -g TestDesktop_RG --vm-name vmAWB10Test09
Traceback (most recent call last):
File "/usr/local/Cellar/python3/3.6.0_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/azure/cli/main.py", line 37, in main
cmd_result = APPLICATION.execute(args)
File "/usr/local/Cellar/python3/3.6.0_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/azure/cli/core/application.py", line 157, in execute
result = expanded_arg.func(params)
File "/usr/local/Cellar/python3/3.6.0_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/azure/cli/core/commands/__init__.py", line 358, in _execute_command
raise ex
File "/usr/local/Cellar/python3/3.6.0_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/azure/cli/core/commands/__init__.py", line 353, in _execute_command
result = op(client, **kwargs) if client else op(**kwargs)
File "/usr/local/Cellar/python3/3.6.0_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/azure/cli/command_modules/vm/custom.py", line 861, in set_extension
instance_name = _get_extension_instance_name(vm.instance_view, publisher, vm_extension_name)
File "/usr/local/Cellar/python3/3.6.0_1/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/azure/cli/command_modules/vm/custom.py", line 712, in _get_extension_instance_name
ext = next((x for x in instance_view.extensions
StopIteration
``` | Azure/azure-cli | diff --git a/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py b/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py
index 7ea9fe5ab..29b1c0205 100644
--- a/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py
+++ b/src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py
@@ -12,7 +12,8 @@ from azure.cli.command_modules.vm.custom import enable_boot_diagnostics, disable
_merge_secrets
from azure.cli.command_modules.vm.custom import (_get_access_extension_upgrade_info,
_LINUX_ACCESS_EXT,
- _WINDOWS_ACCESS_EXT)
+ _WINDOWS_ACCESS_EXT,
+ _get_extension_instance_name)
from azure.cli.command_modules.vm.custom import \
(attach_unmanaged_data_disk, detach_data_disk, get_vmss_instance_view)
from azure.cli.command_modules.vm.disk_encryption import enable, disable, _check_encrypt_is_supported
@@ -342,6 +343,18 @@ class Test_Vm_Custom(unittest.TestCase):
vault123Expected.sort(key=lambda x: x['certificateUrl'])
self.assertListEqual(vault123Expected, vault123)
+ def test_get_extension_instance_name(self):
+ instance_view = mock.MagicMock()
+ extension = mock.MagicMock()
+ extension.type = 'publisher2.extension2'
+ instance_view.extensions = [extension]
+
+ # action
+ result = _get_extension_instance_name(instance_view, 'publisher1', 'extension1')
+
+ # assert
+ self.assertEqual(result, 'extension1')
+
class FakedVM: # pylint: disable=too-few-public-methods,old-style-class
def __init__(self, nics=None, disks=None, os_disk=None):
diff --git a/src/command_modules/azure-cli-vm/tests/test_vm_defaults.py b/src/command_modules/azure-cli-vm/tests/test_vm_defaults.py
index 9543ccd5f..4ebb92bb8 100644
--- a/src/command_modules/azure-cli-vm/tests/test_vm_defaults.py
+++ b/src/command_modules/azure-cli-vm/tests/test_vm_defaults.py
@@ -175,6 +175,8 @@ class TestVMSSCreateDefaultVnet(unittest.TestCase):
ns.vnet_type = 'new'
ns.vnet_address_prefix = '10.0.0.0/16'
ns.subnet_address_prefix = None
+ ns.app_gateway_type = 'new'
+ ns.app_gateway_subnet_address_prefix = '10.0.1.0/22'
ns.instance_count = 1000
_validate_vmss_create_subnet(ns)
self.assertEqual('10.0.0.0/22', ns.subnet_address_prefix)
@@ -184,6 +186,8 @@ class TestVMSSCreateDefaultVnet(unittest.TestCase):
ns.vnet_type = 'new'
ns.vnet_address_prefix = '10.0.0.0/16'
ns.subnet_address_prefix = None
+ ns.app_gateway_type = None
+ ns.app_gateway_subnet_address_prefix = None
ns.instance_count = 2
_validate_vmss_create_subnet(ns)
self.assertEqual('10.0.0.0/24', ns.subnet_address_prefix)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 3,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 5
} | 0.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==2.0.0
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_datalake&subdirectory=src/command_modules/azure-cli-datalake
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_documentdb&subdirectory=src/command_modules/azure-cli-documentdb
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@29d8a30bfb42784c61c0c28e7515937272d276bc#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.4
azure-core==1.24.2
azure-datalake-store==0.0.5
azure-graphrbac==0.30.0rc6
azure-keyvault==0.1.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==3.0.0
azure-mgmt-compute==0.33.1rc1
azure-mgmt-containerregistry==0.2.0
azure-mgmt-datalake-analytics==0.1.3
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.3
azure-mgmt-dns==1.0.0
azure-mgmt-documentdb==0.1.0
azure-mgmt-iothub==0.2.1
azure-mgmt-keyvault==0.30.0
azure-mgmt-monitor==0.1.0
azure-mgmt-network==0.30.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.2
azure-mgmt-sql==0.3.3
azure-mgmt-storage==0.31.0
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.31.0
azure-monitor==0.2.0
azure-nspkg==3.0.2
azure-storage==0.33.0
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==2.0.0
- azure-common==1.1.4
- azure-core==1.24.2
- azure-datalake-store==0.0.5
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.1.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==3.0.0
- azure-mgmt-compute==0.33.1rc1
- azure-mgmt-containerregistry==0.2.0
- azure-mgmt-datalake-analytics==0.1.3
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.3
- azure-mgmt-dns==1.0.0
- azure-mgmt-documentdb==0.1.0
- azure-mgmt-iothub==0.2.1
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-monitor==0.1.0
- azure-mgmt-network==0.30.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.2
- azure-mgmt-sql==0.3.3
- azure-mgmt-storage==0.31.0
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.31.0
- azure-monitor==0.2.0
- azure-nspkg==3.0.2
- azure-storage==0.33.0
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_get_extension_instance_name"
]
| []
| [
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_attach_existing_datadisk_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_attach_new_datadisk_custom_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_attach_new_datadisk_default_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_deattach_disk_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_disable_boot_diagnostics_on_vm",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_disable_encryption_error_cases_handling",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_enable_boot_diagnostics_on_vm_never_enabled",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_enable_boot_diagnostics_skip_when_enabled_already",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_enable_encryption_error_cases_handling",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_encryption_distro_check",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_get_access_extension_upgrade_info",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_merge_secrets",
"src/command_modules/azure-cli-vm/tests/test_custom_vm_commands.py::Test_Vm_Custom::test_show_vmss_instance_view",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMCreateDefaultVnet::test_matching_vnet_specified_location",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMCreateDefaultVnet::test_no_matching_vnet",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMSSCreateDefaultVnet::test_matching_vnet_no_subnet_size_matching",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMSSCreateDefaultVnet::test_matching_vnet_subnet_size_matching",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMSSCreateDefaultVnet::test_new_subnet_size_for_big_vmss",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMSSCreateDefaultVnet::test_new_subnet_size_for_small_vmss",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMCreateDefaultStorageAccount::test_matching_storage_account_specified_location",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMCreateDefaultStorageAccount::test_no_matching_storage_account",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMDefaultAuthType::test_default_linux",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMDefaultAuthType::test_default_windows",
"src/command_modules/azure-cli-vm/tests/test_vm_defaults.py::TestVMDefaultAuthType::test_linux_with_password"
]
| []
| MIT License | 1,116 | [
"azure-cli.pyproj",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_template_builder.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py"
]
| [
"azure-cli.pyproj",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_params.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_template_builder.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_validators.py"
]
|
|
openmrslab__suspect-43 | 522105c8852802054a8849405e74c8cb8fa2bc60 | 2017-03-25 20:13:05 | 964f2460e46378c29e78d280f999128f34e829df | coveralls:
[Coverage Status](https://coveralls.io/builds/10772873)
Coverage increased (+0.2%) to 75.135% when pulling **a63bfd373567d86cbfe31d411b34652e2f3e284c on mrsobjects_inheritance** into **522105c8852802054a8849405e74c8cb8fa2bc60 on master**.
| diff --git a/requirements.txt b/requirements.txt
index 5af82e3..76bf75c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,3 +4,4 @@ nbsphinx
ipykernel
numpy
lmfit
+pydicom
diff --git a/suspect/__init__.py b/suspect/__init__.py
index 53dcc1a..5a58e22 100644
--- a/suspect/__init__.py
+++ b/suspect/__init__.py
@@ -1,4 +1,4 @@
-from suspect.mrsdata import MRSData
+from suspect.mrsobjects import MRSBase, MRSData, MRSSpectrum
from suspect._transforms import *
from . import processing
from . import io
diff --git a/suspect/core.py b/suspect/core.py
index e667fe1..d34ae77 100644
--- a/suspect/core.py
+++ b/suspect/core.py
@@ -1,14 +1,11 @@
-from .mrsdata import MRSData
-
-
def adjust_phase(data, zero_phase, first_phase=0, fixed_frequency=0):
"""
- Adjust the phase of an MRSData object
+ Adjust the phase of an MRSBase object
Parameters
----------
- data : MRSData
- The MRSData object to be phased
+ data : MRSSpectrum
+ The MRSSpectrum object to be phased
zero_phase : scalar
The change to the zero order phase, in radians
first_phase : scalar, optional
@@ -19,15 +16,15 @@ def adjust_phase(data, zero_phase, first_phase=0, fixed_frequency=0):
Returns
-------
- out : MRSData
- A new MRSData object with adjusted phase.
+ out : MRSSpectrum
+ A new MRSSpectrum object with adjusted phase.
"""
return data.adjust_phase(zero_phase, first_phase, fixed_frequency)
def adjust_frequency(data, frequency_shift):
"""
- Adjust the centre frequency of an MRSData object.
+ Adjust the centre frequency of an MRSBase object.
Parameters
----------
diff --git a/suspect/mrsdata.py b/suspect/mrsobjects.py
similarity index 67%
rename from suspect/mrsdata.py
rename to suspect/mrsobjects.py
index ebe8a9f..a1fa3a9 100644
--- a/suspect/mrsdata.py
+++ b/suspect/mrsobjects.py
@@ -1,7 +1,7 @@
import numpy
-class MRSData(numpy.ndarray):
+class MRSBase(numpy.ndarray):
"""
numpy.ndarray subclass with additional metadata like sampling rate and echo
time.
@@ -22,7 +22,7 @@ class MRSData(numpy.ndarray):
return obj
def __array_finalize__(self, obj):
- # if this instance is being created by slicing from another MRSData, copy the parameters across
+ # if this instance is being created by slicing from another MRSBase, copy the parameters across
self._dt = getattr(obj, 'dt', None)
self._f0 = getattr(obj, 'f0', None)
self._te = getattr(obj, 'te', 30)
@@ -38,25 +38,25 @@ class MRSData(numpy.ndarray):
return numpy.ndarray.__array_wrap__(self, obj)
def __str__(self):
- return "<MRSData instance f0={0}MHz TE={1}ms dt={2}ms>".format(self.f0, self.te, self.dt * 1e3)
+ return "<MRSBase instance f0={0}MHz TE={1}ms dt={2}ms>".format(self.f0, self.te, self.dt * 1e3)
def inherit(self, new_array):
- """Converts a generic numpy ndarray into an MRSData instance by copying its own MRS specific parameters.
+ """Converts a generic numpy ndarray into an MRSBase instance by copying its own MRS specific parameters.
- This is useful when performing some processing on the MRSData object gives a bare ndarray result.
+ This is useful when performing some processing on the MRSBase object gives a bare ndarray result.
Parameters
----------
new_array : numpy ndarray
- Generic ndarray to be converted to MRSData.
+ Generic ndarray to be converted to MRSBase.
Returns
-------
- cast_array : MRSData
- New MRSData instance with data from new_array and parameters from self.
+ cast_array : MRSBase
+ New MRSBase instance with data from new_array and parameters from self.
"""
- cast_array = new_array.view(MRSData)
+ cast_array = new_array.view(type(self))
cast_array._dt = self.dt
cast_array._f0 = self.f0
cast_array._te = self.te
@@ -110,16 +110,6 @@ class MRSData(numpy.ndarray):
"""
return self._f0
- def spectrum(self):
- """
- Returns
- -------
- ndarray
- The Fourier-transformed and shifted data
-
- """
- return numpy.fft.fftshift(numpy.fft.fft(self, axis=-1), axes=-1)
-
def hertz_to_ppm(self, frequency):
"""Converts a frequency in Hertz to the corresponding PPM for this dataset.
@@ -199,7 +189,7 @@ class MRSData(numpy.ndarray):
return numpy.prod(self.voxel_dimensions)
def to_scanner(self, x, y, z):
- """Converts a 3d position in MRSData space to the scanner reference frame
+ """Converts a 3d position in MRSBase space to the scanner reference frame
Parameters
----------
@@ -217,14 +207,14 @@ class MRSData(numpy.ndarray):
"""
if self.transform is None:
- raise ValueError("No transform set for MRSData object {}".format(self))
+ raise ValueError("No transform set for MRSBase object {}".format(self))
transformed_point = self.transform * numpy.matrix([x, y, z, 1]).T
return numpy.squeeze(numpy.asarray(transformed_point))[0:3]
def from_scanner(self, x, y, z):
- """Converts a 3d position in the scanner reference frame to the MRSData space
+ """Converts a 3d position in the scanner reference frame to the MRSBase space
Parameters
----------
@@ -238,37 +228,131 @@ class MRSData(numpy.ndarray):
Returns
-------
ndarray
- Squeezed ndarray representing a point in 3d MRSData space
+ Squeezed ndarray representing a point in 3d MRSBase space
"""
if self.transform is None:
- raise ValueError("No transform set for MRSData object {}".format(self))
+ raise ValueError("No transform set for MRSBase object {}".format(self))
transformed_point = numpy.linalg.inv(self.transform) * numpy.matrix([x, y, z, 1]).T
return numpy.squeeze(numpy.asarray(transformed_point))[0:3]
- def adjust_phase(self, zero_phase, first_phase=0, fixed_frequency=0):
+
+class MRSData(MRSBase):
+ """
+ MRS data in the time domain.
+ """
+
+ def spectrum(self):
+ """
+ Returns
+ -------
+ MRSSpectrum
+ The Fourier-transformed and shifted data, represented as a spectrum
+
+ """
+ spectrum = self.inherit(numpy.fft.fftshift(numpy.fft.fft(self, axis=-1), axes=-1)).view(MRSSpectrum)
+ return spectrum
+
+ def adjust_phase(self, zero_phase, first_phase=0., fixed_frequency=0.):
"""
Adjust the phases of the signal.
Refer to suspect.adjust_phase for full documentation.
+ Parameters
+ ----------
+ zero_phase: float
+ The zero order phase shift in radians
+ first_phase: float
+ The first order phase shift in radians per Hertz
+ fixed_frequency: float
+ The frequency at which the first order phase shift is zero
+
+ Returns
+ -------
+ out : MRSData
+ Phase adjusted FID
+
See Also
--------
suspect.adjust_phase : equivalent function
"""
- # easiest to apply the phase shift in the frequency domain
- # TODO when MRSSpectrum is a real class, this function can delegate
- # to that one.
+ # easiest to do this in the spectral domain
spectrum = self.spectrum()
+ return spectrum.adjust_phase(zero_phase, first_phase, fixed_frequency).fid()
+
+ def adjust_frequency(self, frequency_shift):
+ """
+ Adjust the centre frequency of the signal.
+
+ Refer to suspect.adjust_frequency for full documentation.
+
+ Parameters
+ ----------
+ frequency_shift: float
+ The amount to shift the frequency, in Hertz.
+
+ Returns
+ -------
+ out : MRSData
+ Frequency adjusted FID
+
+ See Also
+ --------
+ suspect.adjust_frequency : equivalent function
+ """
+ correction = numpy.exp(2j * numpy.pi * (frequency_shift * self.time_axis()))
+ return self.inherit(numpy.multiply(self, correction))
+
+
+class MRSSpectrum(MRSBase):
+ """
+ MRS data in the frequency domain
+ """
+
+ def fid(self):
+ """
+ Returns
+ -------
+ MRSData
+ The inverse-Fourier-shifted and inverse-Fourier-transformed data, represented as a FID
+ """
+ fid = self.inherit(numpy.fft.ifft(numpy.fft.ifftshift(self, axes=-1), axis=-1)).view(MRSData)
+ return fid
+
+ def adjust_phase(self, zero_phase, first_phase=0., fixed_frequency=0.):
+ """
+ Adjust the phases of the signal.
+
+ Refer to suspect.adjust_phase for full documentation.
+
+ Parameters
+ ----------
+ zero_phase: float
+ The zero order phase shift in radians
+ first_phase: float
+ The first order phase shift in radians per Hertz
+ fixed_frequency: float
+ The frequency at which the first order phase shift is zero
+
+ Returns
+ -------
+ out : MRSSpectrum
+ Phase adjusted spectrum
+
+ See Also
+ --------
+ suspect.adjust_phase : equivalent function
+ """
phase_ramp = numpy.linspace(-self.sw / 2,
self.sw / 2,
self.np,
endpoint=False)
phase_shift = zero_phase + first_phase * (fixed_frequency + phase_ramp)
- phased_spectrum = spectrum * numpy.exp(1j * phase_shift)
- return self.inherit(numpy.fft.ifft(numpy.fft.ifftshift(phased_spectrum, axes=-1), axis=-1))
+ phased_spectrum = self * numpy.exp(1j * phase_shift)
+ return phased_spectrum
def adjust_frequency(self, frequency_shift):
"""
@@ -283,12 +367,11 @@ class MRSData(numpy.ndarray):
Returns
-------
- out : MRSData
- Frequency adjusted FID
+ out : MRSSpectrum
+ Frequency adjusted spectrum
See Also
--------
suspect.adjust_frequency : equivalent function
"""
- correction = numpy.exp(2j * numpy.pi * (frequency_shift * self.time_axis()))
- return self.inherit(numpy.multiply(self, correction))
+ return self.fid().adjust_frequency(frequency_shift).spectrum()
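The patch above splits the old `MRSData` class into an `MRSBase` ndarray subclass with time-domain (`MRSData`) and frequency-domain (`MRSSpectrum`) children, so `spectrum()` and `fid()` round-trip between the two views. A stripped-down sketch of that round trip, with simplified class names and none of suspect's dt/f0/te metadata handling:

```python
# Stripped-down sketch of the subclass round trip introduced above; suspect's real
# classes also carry sampling metadata (dt, f0, te), which is omitted here.
import numpy


class Base(numpy.ndarray):
    def __new__(cls, data):
        return numpy.asarray(data, dtype='complex').view(cls)


class FID(Base):
    def spectrum(self):
        # forward FFT plus shift, viewed as the frequency-domain type
        return numpy.fft.fftshift(numpy.fft.fft(self, axis=-1), axes=-1).view(Spectrum)


class Spectrum(Base):
    def fid(self):
        # exact inverse of spectrum(): unshift, then inverse FFT
        return numpy.fft.ifft(numpy.fft.ifftshift(self, axes=-1), axis=-1).view(FID)


fid = FID(numpy.ones(8))
numpy.testing.assert_allclose(fid.spectrum().fid(), fid)  # the round trip recovers the FID
```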
| Spectra should be first class data objects
Currently MRSData objects contain FID data and provide various convenience methods and properties, while spectra are bare ndarrays without any additional information. It would be better if the two classes inherited from (let's say) MRSBase giving some core methods, and then provided their own methods on top of those. Calling .spectrum() on an MRSData would return an MRSSpectrum object and calling .fid() on an MRSSpectrum object would return an MRSData object. Then users can work with whichever domain they prefer, without having to go back and forth all the time. | openmrslab/suspect | diff --git a/tests/test_mrs/test_mrsdata.py b/tests/test_mrs/test_mrsdata.py
index 49c19bf..cb34c84 100644
--- a/tests/test_mrs/test_mrsdata.py
+++ b/tests/test_mrs/test_mrsdata.py
@@ -31,3 +31,12 @@ def test_zero_rank_to_scalar():
data = suspect.MRSData(numpy.ones(1024, 'complex'), 5e-4, 123)
sum = numpy.sum(data)
assert numpy.isscalar(sum)
+
+
+def test_spectrum_2_fid():
+ data = suspect.MRSData(numpy.ones(1024, 'complex'), 5e-4, 123)
+ spectrum = data.spectrum()
+ assert type(spectrum) == suspect.MRSSpectrum
+ numpy.testing.assert_equal(spectrum, numpy.fft.fftshift(numpy.fft.fft(data)))
+ fid = spectrum.fid()
+ numpy.testing.assert_equal(data, fid)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 4
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
asteval==0.9.26
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
charset-normalizer==2.0.12
decorator==5.1.1
defusedxml==0.7.1
docutils==0.18.1
entrypoints==0.4
future==1.0.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
jsonschema==3.2.0
jupyter-client==7.1.2
jupyter-core==4.9.2
jupyterlab-pygments==0.1.2
lmfit==1.0.3
MarkupSafe==2.0.1
mistune==0.8.4
mock==5.2.0
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nbsphinx==0.8.8
nest-asyncio==1.6.0
numpy==1.19.5
packaging==21.3
pandocfilters==1.5.1
Parsley==1.3
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pydicom==2.3.1
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyWavelets==1.1.1
pyzmq==25.1.2
requests==2.27.1
scipy==1.5.4
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
-e git+https://github.com/openmrslab/suspect.git@522105c8852802054a8849405e74c8cb8fa2bc60#egg=suspect
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
uncertainties==3.1.7
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
zipp==3.6.0
| name: suspect
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- asteval==0.9.26
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- charset-normalizer==2.0.12
- decorator==5.1.1
- defusedxml==0.7.1
- docutils==0.18.1
- entrypoints==0.4
- future==1.0.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- jsonschema==3.2.0
- jupyter-client==7.1.2
- jupyter-core==4.9.2
- jupyterlab-pygments==0.1.2
- lmfit==1.0.3
- markupsafe==2.0.1
- mistune==0.8.4
- mock==5.2.0
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nbsphinx==0.8.8
- nest-asyncio==1.6.0
- numpy==1.19.5
- packaging==21.3
- pandocfilters==1.5.1
- parsley==1.3
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pydicom==2.3.1
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pywavelets==1.1.1
- pyzmq==25.1.2
- requests==2.27.1
- scipy==1.5.4
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- uncertainties==3.1.7
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/suspect
| [
"tests/test_mrs/test_mrsdata.py::test_spectrum_2_fid"
]
| []
| [
"tests/test_mrs/test_mrsdata.py::test_create_mrs",
"tests/test_mrs/test_mrsdata.py::test_slice_mrs",
"tests/test_mrs/test_mrsdata.py::test_average_mrs",
"tests/test_mrs/test_mrsdata.py::test_zero_rank_to_scalar"
]
| []
| MIT License | 1,117 | [
"suspect/mrsdata.py",
"requirements.txt",
"suspect/core.py",
"suspect/__init__.py"
]
| [
"requirements.txt",
"suspect/core.py",
"suspect/mrsobjects.py",
"suspect/__init__.py"
]
|
jupyterhub__kubespawner-38 | 477b322b3bebd6d668185912c633eac2c1fee5dd | 2017-03-27 11:04:31 | 477b322b3bebd6d668185912c633eac2c1fee5dd | diff --git a/kubespawner/objects.py b/kubespawner/objects.py
index c75e80c..16509e4 100644
--- a/kubespawner/objects.py
+++ b/kubespawner/objects.py
@@ -97,7 +97,7 @@ def make_pod_spec(
{
'name': 'notebook',
'image': image_spec,
- 'command': cmd,
+ 'args': cmd,
'imagePullPolicy': image_pull_policy,
'ports': [{
'containerPort': port,
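The one-line change above swaps `command` for `args` in the generated container spec. In Kubernetes, `command` overrides the image's ENTRYPOINT while `args` only overrides CMD, so with `args` the docker-stacks entrypoint (such as `singleuser.sh`) still runs before the notebook command. A rough sketch of the container entry this produces, mirroring the values expected by `test_make_simplest_pod` in the test patch that follows:

```python
# Shape of the container entry after the change above; values mirror the
# simplest-pod expectations in the updated tests below.
cmd = ['jupyterhub-singleuser']

container = {
    'name': 'notebook',
    'image': 'jupyter/singleuser:latest',
    # 'args' maps onto Docker CMD, leaving the image's ENTRYPOINT intact;
    # 'command' would have replaced the ENTRYPOINT outright.
    'args': cmd,
    'imagePullPolicy': 'IfNotPresent',
    'ports': [{'containerPort': 8888}],
}
```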
| Specify container command
Is it possible to specify the container command via config? To use the docker stacks, command must be set to `/usr/local/bin/singleuser.sh`, but I can't see how to do it here. In DockerSpawner, it's passed to `docker.create` | jupyterhub/kubespawner | diff --git a/tests/test_objects.py b/tests/test_objects.py
index 11f2a55..a32a72b 100644
--- a/tests/test_objects.py
+++ b/tests/test_objects.py
@@ -39,7 +39,7 @@ def test_make_simplest_pod():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
- "command": ["jupyterhub-singleuser"],
+ "args": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -97,7 +97,7 @@ def test_make_labeled_pod():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
- "command": ["jupyterhub-singleuser"],
+ "args": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -157,7 +157,7 @@ def test_make_pod_with_image_pull_secrets():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
- "command": ["jupyterhub-singleuser"],
+ "args": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -219,7 +219,7 @@ def test_set_pod_uid_fs_gid():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
- "command": ["jupyterhub-singleuser"],
+ "args": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -278,7 +278,7 @@ def test_make_pod_resources_all():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
- "command": ["jupyterhub-singleuser"],
+ "args": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
@@ -339,7 +339,7 @@ def test_make_pod_with_env():
"name": "notebook",
"image": "jupyter/singleuser:latest",
"imagePullPolicy": "IfNotPresent",
- "command": ["jupyterhub-singleuser"],
+ "args": ["jupyterhub-singleuser"],
"ports": [{
"containerPort": 8888
}],
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 1
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "jupyterhub pyyaml pycurl",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-dev libcurl4-openssl-dev libssl-dev"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alembic @ file:///croot/alembic_1729532425530/work
annotated-types @ file:///croot/annotated-types_1709542908624/work
arrow==1.3.0
async-generator @ file:///home/ktietz/src/ci/async_generator_1611927993394/work
attrs @ file:///croot/attrs_1734533101012/work
blinker @ file:///croot/blinker_1737448726027/work
Brotli @ file:///croot/brotli-split_1736182456865/work
certifi @ file:///croot/certifi_1738623731865/work/certifi
certipy @ file:///Users/ktietz/demo/mc3/conda-bld/certipy_1630675329981/work
cffi @ file:///croot/cffi_1736182485317/work
charset-normalizer @ file:///croot/charset-normalizer_1721748349566/work
cryptography @ file:///croot/cryptography_1740577825284/work
exceptiongroup==1.2.2
fqdn==1.5.1
greenlet @ file:///croot/greenlet_1733860072744/work
idna @ file:///croot/idna_1714398848350/work
importlib_metadata @ file:///croot/importlib_metadata-suite_1732633488278/work
iniconfig==2.1.0
isoduration==20.11.0
Jinja2 @ file:///croot/jinja2_1741710844255/work
jsonpointer==3.0.0
jsonschema @ file:///croot/jsonschema_1728486696720/work
jsonschema-specifications @ file:///croot/jsonschema-specifications_1699032386549/work
jupyter-events @ file:///croot/jupyter_events_1741184577592/work
jupyterhub @ file:///croot/jupyterhub-split_1737582872553/work
-e git+https://github.com/jupyterhub/kubespawner.git@477b322b3bebd6d668185912c633eac2c1fee5dd#egg=jupyterhub_kubespawner
Mako @ file:///croot/mako_1665472421453/work
MarkupSafe @ file:///croot/markupsafe_1738584038848/work
oauthlib @ file:///croot/oauthlib_1679489621486/work
packaging @ file:///croot/packaging_1734472117206/work
pamela @ file:///croot/pamela_1737578271136/work
pluggy==1.5.0
prometheus_client @ file:///croot/prometheus_client_1731953121795/work
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
pycurl @ file:///croot/pycurl_1737065866108/work
pydantic @ file:///croot/pydantic_1734736067156/work
pydantic_core @ file:///croot/pydantic-core_1734726052986/work
PyJWT @ file:///croot/pyjwt_1736178889029/work
pyOpenSSL @ file:///croot/pyopenssl_1741343803032/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305812635/work
pytest==8.3.5
python-dateutil @ file:///croot/python-dateutil_1716495738603/work
python-json-logger @ file:///croot/python-json-logger_1734370021104/work
PyYAML @ file:///croot/pyyaml_1728657952215/work
referencing @ file:///croot/referencing_1699012038513/work
requests @ file:///croot/requests_1730999120400/work
rfc3339-validator @ file:///croot/rfc3339-validator_1683077044675/work
rfc3986-validator @ file:///croot/rfc3986-validator_1683058983515/work
rpds-py @ file:///croot/rpds-py_1736541261634/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
SQLAlchemy @ file:///croot/sqlalchemy_1736565039440/work
tomli==2.2.1
tornado @ file:///croot/tornado_1733960490606/work
traitlets @ file:///croot/traitlets_1718227057033/work
types-python-dateutil==2.9.0.20241206
typing_extensions @ file:///croot/typing_extensions_1734714854207/work
uri-template==1.3.0
urllib3 @ file:///croot/urllib3_1737133630106/work
webcolors==24.11.1
zipp @ file:///croot/zipp_1732630741423/work
| name: kubespawner
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- alembic=1.13.3=py39h06a4308_0
- annotated-types=0.6.0=py39h06a4308_0
- async_generator=1.10=pyhd3eb1b0_0
- attrs=24.3.0=py39h06a4308_0
- blinker=1.9.0=py39h06a4308_0
- brotli-python=1.0.9=py39h6a678d5_9
- c-ares=1.19.1=h5eee18b_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2025.1.31=py39h06a4308_0
- certipy=0.1.3=pyhd3eb1b0_0
- cffi=1.17.1=py39h1fdaa30_1
- charset-normalizer=3.3.2=pyhd3eb1b0_0
- configurable-http-proxy=4.6.0=h06a4308_0
- cryptography=44.0.1=py39h7825ff9_0
- greenlet=3.1.1=py39h6a678d5_0
- icu=73.1=h6a678d5_0
- idna=3.7=py39h06a4308_0
- importlib-metadata=8.5.0=py39h06a4308_0
- importlib_metadata=8.5.0=hd3eb1b0_0
- jinja2=3.1.6=py39h06a4308_0
- jsonschema=4.23.0=py39h06a4308_0
- jsonschema-specifications=2023.7.1=py39h06a4308_0
- jupyter_events=0.12.0=py39h06a4308_0
- jupyterhub=5.2.1=py39h06a4308_0
- jupyterhub-base=5.2.1=py39h06a4308_0
- krb5=1.20.1=h143b758_1
- ld_impl_linux-64=2.40=h12ee557_0
- libcurl=8.12.1=hc9e6f67_0
- libedit=3.1.20230828=h5eee18b_0
- libev=4.33=h7f8727e_1
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libnghttp2=1.57.0=h2d74bed_0
- libssh2=1.11.1=h251f7ec_0
- libstdcxx-ng=11.2.0=h1234567_1
- libuv=1.48.0=h5eee18b_0
- lz4-c=1.9.4=h6a678d5_1
- mako=1.2.3=py39h06a4308_0
- markupsafe=3.0.2=py39h5eee18b_0
- ncurses=6.4=h6a678d5_0
- nodejs=20.17.0=hb8e3597_0
- oauthlib=3.2.2=py39h06a4308_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pamela=1.2.0=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- prometheus_client=0.21.0=py39h06a4308_0
- pycparser=2.21=pyhd3eb1b0_0
- pycurl=7.45.4=py39haf51bff_0
- pydantic=2.10.3=py39h06a4308_0
- pydantic-core=2.27.1=py39h4aa5aa6_0
- pyjwt=2.10.1=py39h06a4308_0
- pyopenssl=25.0.0=py39h06a4308_0
- pysocks=1.7.1=py39h06a4308_0
- python=3.9.21=he870216_1
- python-dateutil=2.9.0post0=py39h06a4308_2
- python-json-logger=3.2.1=py39h06a4308_0
- pyyaml=6.0.2=py39h5eee18b_0
- readline=8.2=h5eee18b_0
- referencing=0.30.2=py39h06a4308_0
- requests=2.32.3=py39h06a4308_1
- rfc3339-validator=0.1.4=py39h06a4308_0
- rfc3986-validator=0.1.1=py39h06a4308_0
- rpds-py=0.22.3=py39h4aa5aa6_0
- setuptools=75.8.0=py39h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sqlalchemy=2.0.37=py39h00e1ef3_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tornado=6.4.2=py39h5eee18b_0
- traitlets=5.14.3=py39h06a4308_0
- typing-extensions=4.12.2=py39h06a4308_0
- typing_extensions=4.12.2=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- urllib3=2.3.0=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- yaml=0.2.5=h7b6447c_0
- zipp=3.21.0=py39h06a4308_0
- zlib=1.2.13=h5eee18b_1
- zstd=1.5.6=hc292b87_0
- pip:
- arrow==1.3.0
- exceptiongroup==1.2.2
- fqdn==1.5.1
- iniconfig==2.1.0
- isoduration==20.11.0
- jsonpointer==3.0.0
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
- types-python-dateutil==2.9.0.20241206
- uri-template==1.3.0
- webcolors==24.11.1
prefix: /opt/conda/envs/kubespawner
| [
"tests/test_objects.py::test_make_simplest_pod",
"tests/test_objects.py::test_make_labeled_pod",
"tests/test_objects.py::test_make_pod_with_image_pull_secrets",
"tests/test_objects.py::test_set_pod_uid_fs_gid",
"tests/test_objects.py::test_make_pod_resources_all",
"tests/test_objects.py::test_make_pod_with_env"
]
| []
| [
"tests/test_objects.py::test_make_pvc_simple",
"tests/test_objects.py::test_make_resources_all"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,118 | [
"kubespawner/objects.py"
]
| [
"kubespawner/objects.py"
]
|
|
Duke-GCB__DukeDSClient-124 | fb617c41b09cfd859fc50b93308e3680e209069b | 2017-03-27 15:29:01 | bffebebd86d09f5924461959401ef3698b4e47d5 | diff --git a/ddsc/cmdparser.py b/ddsc/cmdparser.py
index 4f381b6..4d6e9c0 100644
--- a/ddsc/cmdparser.py
+++ b/ddsc/cmdparser.py
@@ -271,7 +271,6 @@ class CommandParser(object):
"""
description = "Gives user permission to access a remote project."
add_user_parser = self.subparsers.add_parser('add-user', description=description)
- self.subparsers.choices['add_user'] = add_user_parser
add_project_name_arg(add_user_parser, help_text="Name of the project to add a user to.")
user_or_email = add_user_parser.add_mutually_exclusive_group(required=True)
add_user_arg(user_or_email)
@@ -286,7 +285,6 @@ class CommandParser(object):
"""
description = "Removes user permission to access a remote project."
remove_user_parser = self.subparsers.add_parser('remove-user', description=description)
- self.subparsers.choices['remove_user'] = remove_user_parser
add_project_name_arg(remove_user_parser, help_text="Name of the project to remove a user from.")
user_or_email = remove_user_parser.add_mutually_exclusive_group(required=True)
add_user_arg(user_or_email)
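The two deleted lines above had written each parser into `self.subparsers.choices` a second time under an underscore key, which is why the help listed both `add-user` and `add_user`; `add_parser()` already registers the hyphenated name. A standalone plain-argparse sketch of the same effect, not DukeDSClient's actual `CommandParser`:

```python
# Plain-argparse sketch of why the removed lines caused duplicate help entries;
# DukeDSClient's CommandParser wraps this same subparsers mechanism.
import argparse

parser = argparse.ArgumentParser(prog='ddsclient')
subparsers = parser.add_subparsers()

add_user = subparsers.add_parser('add-user')   # registers the 'add-user' choice once

# The removed pattern: inserting a second key for the same parser object makes the
# usage line advertise both spellings, e.g. {add-user,add_user}.
# subparsers.choices['add_user'] = add_user

print(list(subparsers.choices.keys()))         # ['add-user']
```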
| Remove duplicate commands in the command line.
There shouldn't be both add-user and add_user in the help. | Duke-GCB/DukeDSClient | diff --git a/ddsc/tests/test_cmdparser.py b/ddsc/tests/test_cmdparser.py
new file mode 100644
index 0000000..d1c4df7
--- /dev/null
+++ b/ddsc/tests/test_cmdparser.py
@@ -0,0 +1,19 @@
+from __future__ import absolute_import
+from unittest import TestCase
+from ddsc.cmdparser import CommandParser
+
+
+def no_op():
+ pass
+
+
+class TestCommandParser(TestCase):
+ def test_register_add_user_command(self):
+ command_parser = CommandParser()
+ command_parser.register_add_user_command(no_op)
+ self.assertEqual(['add-user'], list(command_parser.subparsers.choices.keys()))
+
+ def test_register_remove_user_command(self):
+ command_parser = CommandParser()
+ command_parser.register_remove_user_command(no_op)
+ self.assertEqual(['remove-user'], list(command_parser.subparsers.choices.keys()))
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"mock",
"flake8",
"pytest"
],
"pre_install": null,
"python": "3.7",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi @ file:///croot/certifi_1671487769961/work/certifi
-e git+https://github.com/Duke-GCB/DukeDSClient.git@fb617c41b09cfd859fc50b93308e3680e209069b#egg=DukeDSClient
exceptiongroup==1.2.2
flake8==5.0.4
future==0.16.0
importlib-metadata==4.2.0
iniconfig==2.0.0
mccabe==0.7.0
mock==5.2.0
packaging==24.0
pluggy==1.2.0
pycodestyle==2.9.1
pyflakes==2.5.0
pytest==7.4.4
PyYAML==3.12
requests==2.13.0
six==1.10.0
tomli==2.0.1
typing_extensions==4.7.1
zipp==3.15.0
| name: DukeDSClient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- flake8==5.0.4
- future==0.16.0
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- mccabe==0.7.0
- mock==5.2.0
- packaging==24.0
- pluggy==1.2.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest==7.4.4
- pyyaml==3.12
- requests==2.13.0
- six==1.10.0
- tomli==2.0.1
- typing-extensions==4.7.1
- zipp==3.15.0
prefix: /opt/conda/envs/DukeDSClient
| [
"ddsc/tests/test_cmdparser.py::TestCommandParser::test_register_add_user_command",
"ddsc/tests/test_cmdparser.py::TestCommandParser::test_register_remove_user_command"
]
| []
| []
| []
| MIT License | 1,119 | [
"ddsc/cmdparser.py"
]
| [
"ddsc/cmdparser.py"
]
|
|
sciunto-org__python-bibtexparser-155 | 2ce301ecd01f4db0cb355abc36486ab0c5c62331 | 2017-03-27 20:57:20 | 19051fdaeb3eea869aef1f7534d0a678f12f1b8c | diff --git a/bibtexparser/customization.py b/bibtexparser/customization.py
index 98cd246..c1b19b2 100644
--- a/bibtexparser/customization.py
+++ b/bibtexparser/customization.py
@@ -7,11 +7,10 @@ You can find inspiration from these functions to design yours.
Each of them takes a record and return the modified record.
"""
-import itertools
import re
import logging
-from bibtexparser.latexenc import unicode_to_latex, unicode_to_crappy_latex1, unicode_to_crappy_latex2, string_to_latex, protect_uppercase
+from bibtexparser.latexenc import latex_to_unicode, string_to_latex, protect_uppercase
logger = logging.getLogger(__name__)
@@ -495,22 +494,16 @@ def convert_to_unicode(record):
:returns: dict -- the modified record.
"""
for val in record:
- if '\\' in record[val] or '{' in record[val]:
- for k, v in itertools.chain(unicode_to_crappy_latex1, unicode_to_latex):
- if v in record[val]:
- record[val] = record[val].replace(v, k)
-
- # If there is still very crappy items
- if '\\' in record[val]:
- for k, v in unicode_to_crappy_latex2:
- if v in record[val]:
- parts = record[val].split(str(v))
- for key, record[val] in enumerate(parts):
- if key+1 < len(parts) and len(parts[key+1]) > 0:
- # Change order to display accents
- parts[key] = parts[key] + parts[key+1][0]
- parts[key+1] = parts[key+1][1:]
- record[val] = k.join(parts)
+ if isinstance(record[val], list):
+ record[val] = [
+ latex_to_unicode(x) for x in record[val]
+ ]
+ elif isinstance(record[val], dict):
+ record[val] = {
+ k: latex_to_unicode(v) for k, v in record[val].items()
+ }
+ else:
+ record[val] = latex_to_unicode(record[val])
return record
diff --git a/bibtexparser/latexenc.py b/bibtexparser/latexenc.py
index b919a24..e225de4 100644
--- a/bibtexparser/latexenc.py
+++ b/bibtexparser/latexenc.py
@@ -7,11 +7,14 @@
# Etienne Posthumus (epoz)
# Francois Boulogne <fboulogne at april dot org>
+import itertools
import re
import sys
+import unicodedata
-__all__ = ['string_to_latex', 'protect_uppercase', 'unicode_to_latex',
- 'unicode_to_crappy_latex1', 'unicode_to_crappy_latex2']
+__all__ = ['string_to_latex', 'latex_to_unicode', 'protect_uppercase',
+ 'unicode_to_latex', 'unicode_to_crappy_latex1',
+ 'unicode_to_crappy_latex2']
def string_to_latex(string):
@@ -29,6 +32,62 @@ def string_to_latex(string):
return ''.join(new)
+def latex_to_unicode(string):
+ """
+ Convert a LaTeX string to unicode equivalent.
+
+ :param string: string to convert
+ :returns: string
+ """
+ if '\\' in string or '{' in string:
+ for k, v in itertools.chain(unicode_to_crappy_latex1, unicode_to_latex):
+ if v in string:
+ string = string.replace(v, k)
+
+ # If there is still very crappy items
+ if '\\' in string:
+ for k, v in unicode_to_crappy_latex2:
+ if v in string:
+ parts = string.split(str(v))
+ for key, string in enumerate(parts):
+ if key+1 < len(parts) and len(parts[key+1]) > 0:
+ # Change order to display accents
+ parts[key] = parts[key] + parts[key+1][0]
+ parts[key+1] = parts[key+1][1:]
+ string = k.join(parts)
+
+ # Place accents at correct position
+ # LaTeX requires accents *before* the character. Unicode requires accents
+ # to be *after* the character. Hence, by a raw conversion, accents are not
+ # on the correct letter, see
+ # https://github.com/sciunto-org/python-bibtexparser/issues/121.
+ # We just swap accents positions to fix this.
+ cleaned_string = []
+ i = 0
+ while i < len(string):
+ if not unicodedata.combining(string[i]):
+ # Not a combining diacritical mark, append it
+ cleaned_string.append(string[i])
+ i += 1
+ elif i < len(string) - 1:
+ # Diacritical mark, append it but swap with next character
+ cleaned_string.append(string[i + 1])
+ cleaned_string.append(string[i])
+ i += 2
+ else:
+ # If trailing character is a combining one, just discard it
+ i += 1
+
+ # Normalize unicode characters
+ # Also, when converting to unicode, we should return a normalized Unicode
+ # string, that is always having only compound accentuated character (letter
+ # + accent) or single accentuated character (letter with accent). We choose
+ # to normalize to the latter.
+ cleaned_string = unicodedata.normalize("NFC", "".join(cleaned_string))
+
+ return cleaned_string
+
+
def protect_uppercase(string):
"""
Protect uppercase letters for bibtex
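A short sketch of the two `unicodedata` behaviours the new `latex_to_unicode` relies on; the sample string is only illustrative. `combining()` flags a diacritical mark that landed in front of its letter (the symptom reported in the issue below), and `normalize('NFC', ...)` fuses the swapped letter/mark pair into one accented character.

```python
import unicodedata

# After a naive LaTeX -> unicode replacement the combining umlaut (U+0308)
# can end up *before* the letter it belongs to:
broken = "Gedenk\u0308uberlieferung"

assert unicodedata.combining("\u0308") != 0   # combining diacritical mark
assert unicodedata.combining("u") == 0        # ordinary letter

# Swap each (mark, letter) pair so the mark follows its letter ...
chars, i = [], 0
while i < len(broken):
    if unicodedata.combining(broken[i]) and i + 1 < len(broken):
        chars.append(broken[i + 1])
        chars.append(broken[i])
        i += 2
    else:
        chars.append(broken[i])
        i += 1

# ... then compose letter + mark into single code points.
print(unicodedata.normalize("NFC", "".join(chars)))  # Gedenküberlieferung
```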
| convert_to_unicode
I am using the
parser.customization = convert_to_unicode
feature, but in the utf-8 output the accents are coming out on the character before the correct one. What would cause this?
| sciunto-org/python-bibtexparser | diff --git a/bibtexparser/tests/test_customization.py b/bibtexparser/tests/test_customization.py
index 7587d5e..c6b168b 100644
--- a/bibtexparser/tests/test_customization.py
+++ b/bibtexparser/tests/test_customization.py
@@ -86,6 +86,11 @@ class TestBibtexParserMethod(unittest.TestCase):
result = convert_to_unicode(record)
expected = {'toto': 'ü ü'}
self.assertEqual(result, expected)
+ # From issue 121
+ record = {'title': '{Two Gedenk\\"uberlieferung der Angelsachsen}'}
+ result = convert_to_unicode(record)
+ expected = {'title': '{Two Gedenküberlieferung der Angelsachsen}'}
+ self.assertEqual(result, expected)
###########
# homogenize
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 2
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"nose-cov",
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/sciunto-org/python-bibtexparser.git@2ce301ecd01f4db0cb355abc36486ab0c5c62331#egg=bibtexparser
cov-core==1.15.0
coverage==7.8.0
exceptiongroup==1.2.2
iniconfig==2.1.0
nose==1.3.7
nose-cov==1.6
packaging==24.2
pluggy==1.5.0
pyparsing==3.2.3
pytest==8.3.5
tomli==2.2.1
| name: python-bibtexparser
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cov-core==1.15.0
- coverage==7.8.0
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- nose==1.3.7
- nose-cov==1.6
- packaging==24.2
- pluggy==1.5.0
- pyparsing==3.2.3
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/python-bibtexparser
| [
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_convert_to_unicode"
]
| []
| [
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_getnames",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_homogenize",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_keywords",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_alreadyOK",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_nothing",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_simple",
"bibtexparser/tests/test_customization.py::TestBibtexParserMethod::test_page_double_hyphen_space"
]
| []
| MIT License | 1,120 | [
"bibtexparser/customization.py",
"bibtexparser/latexenc.py"
]
| [
"bibtexparser/customization.py",
"bibtexparser/latexenc.py"
]
|
|
jendrikseipp__vulture-30 | bae51858abe19f3337e5e17cbb170533161d634a | 2017-03-28 04:49:05 | bae51858abe19f3337e5e17cbb170533161d634a | jendrikseipp: The file looks good. Now we only need a test case under `tests/test_whitelist_python.py`. It should check that whitelist_python.py can be run by Python without errors and that code using the `default_factory` attribute is not marked as unused. You can look at `tests/test_script.py` to find out how to test Python files.
RJ722: HI! @jendrikseipp
Is the current approach in testing (adding an example file and running vulture on it) feasible?
jendrikseipp: This is the only way to test whitelist files. However, normal code snippets should be tested in the same way as in `test_scavenging.py`. If you're adding multiple tests that belong together, you may consider adding a separate test file for that, like `test_format_strings.py`.
RJ722: Thanks! :smile:
Also, one thing I thought was necessary to bring to your attention: I have named the example file with a `.txt` extension, because otherwise, with `.py`, it will cause the tests to fail, as they would try to run vulture over the example file without taking the whitelist into consideration!
Also, do we need extra test_cases here?
RJ722: Hi! @jendrikseipp
Does this look all right or do we need further enhancements? | diff --git a/whitelist_python.py b/whitelist_python.py
new file mode 100644
index 0000000..8e459e6
--- /dev/null
+++ b/whitelist_python.py
@@ -0,0 +1,4 @@
+import collections
+
+collections.defaultdict(list).default_factory = None
+collections.defaultdict(list).default_factory
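The usage pattern the whitelist entry above is meant to cover, sketched with a made-up counter: vulture only ever sees `default_factory` being assigned, never read, so without the whitelist line it flags the attribute as unused.

```python
from collections import defaultdict

counts = defaultdict(int)
for word in ["a", "b", "a"]:
    counts[word] += 1

# "Freeze" the mapping so missing keys raise KeyError instead of being
# created; this is the assignment vulture reported as an unused attribute.
counts.default_factory = None

print(dict(counts))  # {'a': 2, 'b': 1}
```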
| Whitelist defaultdict.default_factory
Originally reported by: **bitserver (Bitbucket: [bitserver](https://bitbucket.org/bitserver), GitHub: [bitserver](https://github.com/bitserver))**
----------------------------------------
I have a collections.defaultdict(). After I have finished populating it, I have to set its default_factory attribute to None. vulture then tells me that default_factory is an unused attribute, but I can't help it, as that's how defaultdict works.
----------------------------------------
- Bitbucket: https://bitbucket.org/jendrikseipp/vulture/issue/21
| jendrikseipp/vulture | diff --git a/tests/test_whitelist_python.py b/tests/test_whitelist_python.py
new file mode 100644
index 0000000..3529af0
--- /dev/null
+++ b/tests/test_whitelist_python.py
@@ -0,0 +1,15 @@
+import subprocess
+import sys
+
+from .test_script import call_vulture, REPO
+
+whitelist_file = 'whitelist_python.py'
+
+
+def test_whitelist_python_with_python():
+ assert subprocess.call(
+ [sys.executable, whitelist_file], cwd=REPO) == 0
+
+
+def test_whitelist_python_with_vulture():
+ assert call_vulture([whitelist_file]) == 0
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_added_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 0
} | 0.13 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
exceptiongroup @ file:///croot/exceptiongroup_1706031385326/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
packaging @ file:///croot/packaging_1734472117206/work
pluggy @ file:///croot/pluggy_1733169602837/work
pytest @ file:///croot/pytest_1738938843180/work
pytest-cov==6.0.0
tomli @ file:///opt/conda/conda-bld/tomli_1657175507142/work
-e git+https://github.com/jendrikseipp/vulture.git@bae51858abe19f3337e5e17cbb170533161d634a#egg=vulture
| name: vulture
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pip=25.0=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- pytest-cov==6.0.0
prefix: /opt/conda/envs/vulture
| [
"tests/test_whitelist_python.py::test_whitelist_python_with_python",
"tests/test_whitelist_python.py::test_whitelist_python_with_vulture"
]
| []
| []
| []
| MIT License | 1,121 | [
"whitelist_python.py"
]
| [
"whitelist_python.py"
]
|
openmrslab__suspect-46 | 964f2460e46378c29e78d280f999128f34e829df | 2017-03-28 13:30:47 | 964f2460e46378c29e78d280f999128f34e829df | coveralls:
[](https://coveralls.io/builds/10809685)
Coverage increased (+0.5%) to 75.646% when pulling **ff8d94bd9539a608b8588295309b45ac9a9ffb5d on 45_twix_transform** into **964f2460e46378c29e78d280f999128f34e829df on master**.
| diff --git a/suspect/_transforms.py b/suspect/_transforms.py
index 78bc68a..caddbfe 100644
--- a/suspect/_transforms.py
+++ b/suspect/_transforms.py
@@ -38,3 +38,33 @@ def transformation_matrix(x_vector, y_vector, translation, spacing):
for j in range(4):
matrix[i, j] *= spacing[j]
return matrix
+
+
+def rotation_matrix(angle, axis):
+ """
+ Creates a 3x3 matrix which rotates `angle` radians around `axis`
+
+ Parameters
+ ----------
+ angle : float
+ The angle in radians to rotate around the axis
+ axis : array
+ The unit vector around which to rotate
+
+ Returns
+ -------
+ matrix : array
+ """
+ c = numpy.cos(angle)
+ s = numpy.sin(angle)
+ matrix = numpy.zeros((3, 3))
+ matrix[0, 0] = c + axis[0] ** 2 * (1 - c)
+ matrix[0, 1] = axis[0] * axis[1] * (1 - c) - axis[2] * s
+ matrix[0, 2] = axis[0] * axis[2] * (1 - c) + axis[1] * s
+ matrix[1, 0] = axis[1] * axis[0] * (1 - c) + axis[2] * s
+ matrix[1, 1] = c + axis[1] ** 2 * (1 - c)
+ matrix[1, 2] = axis[1] * axis[2] * (1 - c) - axis[0] * s
+ matrix[2, 0] = axis[2] * axis[0] * (1 - c) - axis[1] * s
+ matrix[2, 1] = axis[2] * axis[1] * (1 - c) + axis[0] * s
+ matrix[2, 2] = c + axis[2] ** 2 * (1 - c)
+ return matrix
diff --git a/suspect/_version.py b/suspect/_version.py
index d3ec452..acafb4d 100644
--- a/suspect/_version.py
+++ b/suspect/_version.py
@@ -1,1 +1,1 @@
-__version__ = "0.2.0"
+__version__ = "0.3.0a"
diff --git a/suspect/io/twix.py b/suspect/io/twix.py
index 6f5c335..9daa82b 100644
--- a/suspect/io/twix.py
+++ b/suspect/io/twix.py
@@ -1,7 +1,8 @@
-from suspect import MRSData
+from suspect import MRSData, transformation_matrix, rotation_matrix
import struct
import numpy
+#import quaternion
import re
# This file largely relies on information from Siemens regarding the structure
@@ -58,13 +59,25 @@ class TwixBuilder(object):
"patient_id": self.header_params["patient_id"],
"patient_birthdate": self.header_params["patient_birthdate"]
}
- mrs_data = MRSData(data, self.header_params["dt"], self.header_params["f0"], metadata=metadata)
+ mrs_data = MRSData(data,
+ self.header_params["dt"],
+ self.header_params["f0"],
+ metadata=metadata,
+ transform=self.header_params["transform"])
return mrs_data
+def read_double(name, header_string):
+ substring = re.search(r"<ParamDouble.\"{}\"> {{ <Precision> \d+( -?[0-9\.]+)? }}".format(name), header_string)
+ if not substring:
+ raise KeyError(r'ParamDouble."{}" not found in header string'.format(name))
+ number_string = substring.group(1)
+ return float(number_string) if number_string else 0
+
+
def parse_twix_header(header_string):
- # print(header_string)
+ #print(header_string)
# get the name of the protocol being acquired
protocol_name_string = re.search(r"<ParamString.\"tProtocolName\"> { \".+\" }\n", header_string).group()
protocol_name = protocol_name_string.split("\"")[3]
@@ -100,12 +113,49 @@ def parse_twix_header(header_string):
break
else:
raise KeyError("Unable to identify Dwell Time from header")
+
+ # get voxel size
+ ro_fov = read_double("VoI_RoFOV", header_string)
+ pe_fov = read_double("VoI_PeFOV", header_string)
+ slice_thickness = read_double("VoI_SliceThickness", header_string)
+
+ # get position information
+ pos_sag = read_double("VoI_Position_Sag", header_string)
+ pos_cor = read_double("VoI_Position_Cor", header_string)
+ pos_tra = read_double("VoI_Position_Tra", header_string)
+
+ # get orientation information
+ in_plane_rot = read_double("VoI_InPlaneRotAngle", header_string)
+ normal_sag = read_double("VoI_Normal_Sag", header_string)
+ normal_cor = read_double("VoI_Normal_Cor", header_string)
+ normal_tra = read_double("VoI_Normal_Tra", header_string)
+
+ # the orientation is stored in a somewhat strange way - a normal vector and
+ # a rotation angle. to get the row vector, we first use Gram-Schmidt to
+ # make [-1, 0, 0] (the default row vector) orthogonal to the normal, and
+ # then rotate that vector by the rotation angle (which we do here with a
+ # quaternion (not any more, quaternion library has issues with Travis)
+ x_vector = numpy.array([-1, 0, 0])
+ normal_vector = numpy.array([normal_sag, normal_cor, normal_tra])
+ orthogonal_x = x_vector - numpy.dot(x_vector, normal_vector) * normal_vector
+ orthonormal_x = orthogonal_x / numpy.linalg.norm(orthogonal_x)
+ #rotation_quaternion = quaternion.from_rotation_vector(in_plane_rot * normal_vector)
+ #row_vector2 = quaternion.rotate_vectors(rotation_quaternion, orthonormal_x)
+ rot_matrix = rotation_matrix(in_plane_rot, normal_vector)
+ row_vector = numpy.dot(rot_matrix, orthonormal_x)
+ column_vector = numpy.cross(row_vector, normal_vector)
+ transform = transformation_matrix(row_vector,
+ column_vector,
+ [pos_sag, pos_cor, pos_tra],
+ [ro_fov, pe_fov, slice_thickness])
+
return {"protocol_name": protocol_name,
"patient_name": patient_name,
"patient_id": patient_id,
"patient_birthdate": patient_birthday,
"dt": dwell_time,
- "f0": frequency
+ "f0": frequency,
+ "transform": transform
}
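A quick numerical check of the rotation added above; this is a standalone restatement of the Rodrigues formula (the element-wise matrix in `rotation_matrix` expands to the same thing), intended only as a sketch and not as part of the package API.

```python
import numpy as np

def rodrigues(angle, axis):
    """3x3 rotation of `angle` radians about the unit vector `axis`."""
    axis = np.asarray(axis, dtype=float)
    c, s = np.cos(angle), np.sin(angle)
    K = np.array([[0.0, -axis[2], axis[1]],
                  [axis[2], 0.0, -axis[0]],
                  [-axis[1], axis[0], 0.0]])
    return c * np.eye(3) + s * K + (1 - c) * np.outer(axis, axis)

# Rotating the x unit vector 90 degrees about z should give the y unit vector.
R = rodrigues(np.pi / 2, [0, 0, 1])
np.testing.assert_allclose(R @ [1, 0, 0], [0, 1, 0], atol=1e-12)
```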
| Get spatial information out of twix files
At the moment, only rda files load a transform to enable them to be co-registered to structural MRI images. It should in theory be possible to read this information out of the twix file as well. | openmrslab/suspect | diff --git a/tests/test_mrs/test_twix.py b/tests/test_mrs/test_twix.py
index 4c95f5f..2b3b374 100644
--- a/tests/test_mrs/test_twix.py
+++ b/tests/test_mrs/test_twix.py
@@ -15,6 +15,12 @@ def test_veriofile():
assert data.np == 2048
assert data.dt == 2.5e-4
numpy.testing.assert_almost_equal(data.f0, 123.261716)
+ numpy.testing.assert_allclose(data.transform, numpy.array(
+ [[-20, 0, 0, 4.917676],
+ [0, 20, 0, 57.525424],
+ [0, 0, -20, 43.220339],
+ [0, 0, 0, 1]]
+ ))
#def test_skyra():
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 3
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
asteval==0.9.26
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
charset-normalizer==2.0.12
decorator==5.1.1
defusedxml==0.7.1
docutils==0.18.1
entrypoints==0.4
future==1.0.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
jsonschema==3.2.0
jupyter-client==7.1.2
jupyter-core==4.9.2
jupyterlab-pygments==0.1.2
lmfit==1.0.3
MarkupSafe==2.0.1
mistune==0.8.4
mock==5.2.0
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nbsphinx==0.8.8
nest-asyncio==1.6.0
numpy==1.19.5
packaging==21.3
pandocfilters==1.5.1
Parsley==1.3
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
pluggy==1.0.0
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pydicom==2.3.1
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyWavelets==1.1.1
pyzmq==25.1.2
requests==2.27.1
scipy==1.5.4
six==1.17.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
-e git+https://github.com/openmrslab/suspect.git@964f2460e46378c29e78d280f999128f34e829df#egg=suspect
testpath==0.6.0
tomli==1.2.3
tornado==6.1
traitlets==4.3.3
typing_extensions==4.1.1
uncertainties==3.1.7
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
zipp==3.6.0
| name: suspect
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- asteval==0.9.26
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- charset-normalizer==2.0.12
- decorator==5.1.1
- defusedxml==0.7.1
- docutils==0.18.1
- entrypoints==0.4
- future==1.0.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- jsonschema==3.2.0
- jupyter-client==7.1.2
- jupyter-core==4.9.2
- jupyterlab-pygments==0.1.2
- lmfit==1.0.3
- markupsafe==2.0.1
- mistune==0.8.4
- mock==5.2.0
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nbsphinx==0.8.8
- nest-asyncio==1.6.0
- numpy==1.19.5
- packaging==21.3
- pandocfilters==1.5.1
- parsley==1.3
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pluggy==1.0.0
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pydicom==2.3.1
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pywavelets==1.1.1
- pyzmq==25.1.2
- requests==2.27.1
- scipy==1.5.4
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- traitlets==4.3.3
- typing-extensions==4.1.1
- uncertainties==3.1.7
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/suspect
| [
"tests/test_mrs/test_twix.py::test_veriofile"
]
| []
| [
"tests/test_mrs/test_twix.py::test_twix_nofile"
]
| []
| MIT License | 1,122 | [
"suspect/_transforms.py",
"suspect/_version.py",
"suspect/io/twix.py"
]
| [
"suspect/_transforms.py",
"suspect/_version.py",
"suspect/io/twix.py"
]
|
zalando-incubator__kubernetes-log-watcher-32 | 7b9c8c63c94785b04f9e4c6509d6cfeb6876763c | 2017-03-28 16:17:02 | 7b9c8c63c94785b04f9e4c6509d6cfeb6876763c | codecov-io: # [Codecov](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=h1) Report
> Merging [#32](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=desc) into [master](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/commit/7b9c8c63c94785b04f9e4c6509d6cfeb6876763c?src=pr&el=desc) will **decrease** coverage by `0.06%`.
> The diff coverage is `75%`.
[](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #32 +/- ##
=========================================
- Coverage 78.97% 78.9% -0.07%
=========================================
Files 9 9
Lines 371 384 +13
=========================================
+ Hits 293 303 +10
- Misses 78 81 +3
```
| [Impacted Files](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [kube\_log\_watcher/kube.py](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=tree#diff-a3ViZV9sb2dfd2F0Y2hlci9rdWJlLnB5) | `69.23% <0%> (-3.75%)` | :arrow_down: |
| [kube\_log\_watcher/agents/scalyr.py](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=tree#diff-a3ViZV9sb2dfd2F0Y2hlci9hZ2VudHMvc2NhbHlyLnB5) | `92.3% <88.23%> (-0.17%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=footer). Last update [7b9c8c6...0b31a74](https://codecov.io/gh/zalando-incubator/kubernetes-log-watcher/pull/32?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
lmineiro: Although I can't, in all honesty, grasp parts of this, it LGTM
lmineiro: 👍
mohabusama: 👍 | diff --git a/README.rst b/README.rst
index 64a7b52..2a6ae78 100644
--- a/README.rst
+++ b/README.rst
@@ -89,34 +89,16 @@ Operation
Example manifest
----------------
-This is an example manifest for shipping logs to Scalyr, with additional Journald monitoring for master processes running on the node:
+This is an example manifest for shipping logs to Scalyr, with additional Journald monitoring for master processes running on the node.
-.. code-block:: yaml
-
- apiVersion: v1
- kind: ConfigMap
- metadata:
- name: scalyr-config-base
- namespace: kube-system
- data:
- scalyr.config: |
- {
- "import_vars": ["WATCHER_SCALYR_API_KEY", "WATCHER_CLUSTER_ID"],
+.. note::
- "api_key": "$WATCHER_SCALYR_API_KEY",
+ - This manifest assumes running a Kubernetes cluster version > 1.5 (as it depends on `initContainer <https://kubernetes.io/docs/concepts/workloads/pods/init-containers/>`_ for initial Scalyr configuration)
+ - All shared volumes are of type ``hostPath`` in order to survive pod restarts.
+ - Initial Scalyr configuration using ``configMap`` is no longer used as it appears to be reseted to initial values by Kubernetes.
- "server_attributes": {
- "serverHost": "$WATCHER_CLUSTER_ID"
- },
-
- "implicit_metric_monitor": false,
- "implicit_agent_process_metrics_monitor": false,
-
- "logs": [],
- "monitors": []
- }
+.. code-block:: yaml
- ---
apiVersion: extensions/v1beta1
kind: DaemonSet
metadata:
@@ -140,18 +122,50 @@ This is an example manifest for shipping logs to Scalyr, with additional Journal
annotations:
scheduler.alpha.kubernetes.io/critical-pod: ''
scheduler.alpha.kubernetes.io/tolerations: '[{"key":"CriticalAddonsOnly", "operator":"Exists"}]'
+ pod.beta.kubernetes.io/init-containers: '[
+ {
+ "name": "init-scalyr-config",
+ "image": "busybox",
+ "imagePullPolicy": "IfNotPresent",
+ "command": ["sh", "-c"],
+ "args": [
+ "if [ ! -f /mnt/scalyr/agent.json ]; then
+ echo {
+ \\\"import_vars\\\": [\\\"WATCHER_SCALYR_API_KEY\\\", \\\"WATCHER_CLUSTER_ID\\\"],
+ \\\"server_attributes\\\": {\\\"serverHost\\\": \\\"\\$WATCHER_CLUSTER_ID\\\"},
+ \\\"implicit_agent_process_metrics_monitor\\\": false,
+ \\\"implicit_metric_monitor\\\": false,
+ \\\"api_key\\\": \\\"\\$WATCHER_SCALYR_API_KEY\\\",
+ \\\"monitors\\\": [],
+ \\\"logs\\\": []
+ } > /mnt/scalyr/agent.json;
+ echo Updated agent.json to inital configuration;
+ fi
+ && cat /mnt/scalyr/agent.json;
+ test -f /mnt/scalyr-checkpoint/checkpoints.json && ls -lah /mnt/scalyr-checkpoint/checkpoints.json && cat /mnt/scalyr-checkpoint/checkpoints.json"
+ ],
+ "volumeMounts": [
+ {
+ "name": "scalyr-config",
+ "mountPath": "/mnt/scalyr"
+ },
+ {
+ "name": "scalyr-checkpoint",
+ "mountPath": "/mnt/scalyr-checkpoint"
+ }
+ ]
+ }
+ ]'
spec:
containers:
- name: log-watcher
- image: registry.opensource.zalan.do/eagleeye/kubernetes-log-watcher:0.11
+ image: registry.opensource.zalan.do/eagleeye/kubernetes-log-watcher:0.12
env:
- name: CLUSTER_NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- - name: WATCHER_KUBERNETES_UPDATE_CERTIFICATES
- value: "true"
- name: WATCHER_DEBUG
value: "true"
- name: WATCHER_CLUSTER_ID
@@ -177,13 +191,13 @@ This is an example manifest for shipping logs to Scalyr, with additional Journal
readOnly: false
- name: scalyr-config
mountPath: /mnt/scalyr-config
- readOnly: false
- name: scalyr-agent
- image: registry.opensource.zalan.do/eagleeye/scalyr-agent:0.1
+
+ image: registry.opensource.zalan.do/eagleeye/scalyr-agent:0.2
env:
- # Note: added for scalyr-config-base, but not needed by the scalyr-agent itself.
+ # Note: added for scalyr-config, but not needed by the scalyr-agent itself.
- name: WATCHER_SCALYR_API_KEY
value: "<SCALYR-KEY-HERE>"
- name: WATCHER_CLUSTER_ID
@@ -196,8 +210,10 @@ This is an example manifest for shipping logs to Scalyr, with additional Journal
- name: scalyr-logs
mountPath: /mnt/scalyr-logs
readOnly: true
+ - name: scalyr-checkpoint
+ mountPath: /var/lib/scalyr-agent-2
- name: scalyr-config
- mountPath: /etc/scalyr-agent-2/
+ mountPath: /etc/scalyr-agent-2
readOnly: true
- name: journal
mountPath: /var/log/journal
@@ -212,15 +228,17 @@ This is an example manifest for shipping logs to Scalyr, with additional Journal
hostPath:
path: /var/log/journal
- - name: scalyr-logs
- emptyDir: {}
+ - name: scalyr-checkpoint
+ hostPath:
+ path: /var/lib/scalyr-agent
- name: scalyr-config
- configMap:
- name: scalyr-config-base
- items:
- - key: scalyr.config
- path: agent.json
+ hostPath:
+ path: /etc/scalyr-agent
+
+ - name: scalyr-logs
+ hostPath:
+ path: /var/log/scalyr-agent
Configuration
diff --git a/kube_log_watcher/agents/scalyr.py b/kube_log_watcher/agents/scalyr.py
index ad831fe..798d229 100644
--- a/kube_log_watcher/agents/scalyr.py
+++ b/kube_log_watcher/agents/scalyr.py
@@ -31,6 +31,15 @@ class ScalyrAgent(BaseWatcher):
'Scalyr watcher agent initialization failed. {} config path does not exist.'.format(
self.config_path))
+ if not os.path.exists(self.dest_path):
+ raise RuntimeError(
+ 'Scalyr watcher agent initialization failed. {} destination path does not exist.'.format(
+ self.dest_path))
+ else:
+ watched_containers = os.listdir(self.dest_path)
+ logger.info('Scalyr watcher agent found {} watched containers.'.format(len(watched_containers)))
+ logger.debug('Scalyr watcher agent found the following watched containers: {}'.format(watched_containers))
+
self.journald = None
journald_monitor = os.environ.get('WATCHER_SCALYR_JOURNALD', False)
@@ -109,7 +118,11 @@ class ScalyrAgent(BaseWatcher):
current_paths = self._get_current_log_paths()
new_paths = {log['path'] for log in self.logs}
- if self._first_run or new_paths.symmetric_difference(current_paths):
+ diff_paths = new_paths.symmetric_difference(current_paths)
+
+ if self._first_run or diff_paths:
+ logger.debug('Scalyr watcher agent new paths: {}'.format(diff_paths))
+ logger.debug('Scalyr watcher agent current paths: {}'.format(current_paths))
try:
config = self.tpl.render(**kwargs)
@@ -119,7 +132,8 @@ class ScalyrAgent(BaseWatcher):
logger.exception('Scalyr watcher agent failed to write config file.')
else:
self._first_run = False
- logger.info('Scalyr watcher agent updated config file {}'.format(self.config_path))
+ logger.info('Scalyr watcher agent updated config file {} with {} log targets.'.format(
+ self.config_path, len(diff_paths)))
def reset(self):
self.logs = []
@@ -155,10 +169,15 @@ class ScalyrAgent(BaseWatcher):
targets = set()
try:
- with open(self.config_path) as fp:
- config = json.load(fp)
- targets = {log.get('path') for log in config.get('logs', [])}
+ if os.path.exists(self.config_path):
+ with open(self.config_path) as fp:
+ config = json.load(fp)
+ targets = {log.get('path') for log in config.get('logs', [])}
+ logger.debug('Scalyr watcher agent loaded existing config {}: {} log targets exist!'.format(
+ self.config_path, len(config.get('logs', []))))
+ else:
+ logger.warning('Scalyr watcher agent cannot find config file!')
except:
- pass
+ logger.exception('Scalyr watcher agent failed to read config!')
return targets
diff --git a/kube_log_watcher/kube.py b/kube_log_watcher/kube.py
index ac312ab..6db5d05 100644
--- a/kube_log_watcher/kube.py
+++ b/kube_log_watcher/kube.py
@@ -30,7 +30,10 @@ def update_ca_certificate():
def get_client():
config = pykube.KubeConfig.from_service_account(DEFAULT_SERVICE_ACC)
- return pykube.HTTPClient(config)
+ client = pykube.HTTPClient(config)
+ client.session.trust_env = False
+
+ return client
def get_pods(kube_url=None, namespace=DEFAULT_NAMESPACE) -> list:
diff --git a/kube_log_watcher/templates/scalyr.json.jinja2 b/kube_log_watcher/templates/scalyr.json.jinja2
index b43c864..58abce4 100644
--- a/kube_log_watcher/templates/scalyr.json.jinja2
+++ b/kube_log_watcher/templates/scalyr.json.jinja2
@@ -13,6 +13,8 @@
{
"path": "{{ log.path }}",
+ "copy_from_start": true,
+
"attributes": {
"parser": "json"{% if log.attributes %},{% endif %}
{% for k, v in log.attributes.items() %}
@@ -45,7 +47,9 @@
},
{% endif %}
- "module": "scalyr_agent.builtin_monitors.journald_monitor"
+ "module": "scalyr_agent.builtin_monitors.journald_monitor",
+ "monitor_log_write_rate": 10000,
+ "monitor_log_max_write_burst": 200000
}
{% endif %}
]
diff --git a/requirements.txt b/requirements.txt
index 9b2b676..23b4c8a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,2 @@
Jinja2==2.8
-pykube==0.13.0
+pykube>=0.15.0
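A hedged sketch of the decision the patched `write()` path makes (the function name and layout here are illustrative, not the project's API): the Scalyr config is only rewritten when the set of watched log paths actually changes, which also avoids churning the agent and the checkpoint file it uses to deduplicate logs.

```python
import json
import os

def needs_rewrite(config_path, new_paths):
    """True if the rendered config would list a different set of log paths."""
    current = set()
    if os.path.exists(config_path):
        with open(config_path) as fp:
            current = {log.get("path") for log in json.load(fp).get("logs", [])}
    # Same comparison as the patch: any added or removed path triggers a
    # rewrite; an identical set leaves the existing file (and agent) alone.
    return bool(set(new_paths).symmetric_difference(current))

# e.g. needs_rewrite("/etc/scalyr-agent-2/agent.json", {"/mnt/logs/pod-a.log"})
```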
| Persist checkpoint file
Scalyr agent uses a checkpoint file to avoid log duplication. This should be highlighted/used in a Kubernetes cluster since the file should survive agent container restarts. | zalando-incubator/kubernetes-log-watcher | diff --git a/tests/test_scalyr.py b/tests/test_scalyr.py
index 1af390c..ad96269 100644
--- a/tests/test_scalyr.py
+++ b/tests/test_scalyr.py
@@ -61,11 +61,14 @@ def patch_env(monkeypatch, env):
def patch_os(monkeypatch):
makedirs = MagicMock()
symlink = MagicMock()
+ listdir = MagicMock()
+ listdir.return_value = []
monkeypatch.setattr('os.makedirs', makedirs)
monkeypatch.setattr('os.symlink', symlink)
+ monkeypatch.setattr('os.listdir', listdir)
- return makedirs, symlink
+ return makedirs, symlink, listdir
def patch_open(monkeypatch, exc=None):
@@ -81,6 +84,54 @@ def patch_open(monkeypatch, exc=None):
return mock_open, mock_fp
[email protected](
+ 'env,exists',
+ (
+ (
+ {
+ # No API KEY
+ 'WATCHER_SCALYR_DEST_PATH': SCALYR_DEST_PATH,
+ 'WATCHER_SCALYR_CONFIG_PATH': '/etc/config'
+ },
+ (True, True)
+ ),
+ (
+ {
+ # No Dest path
+ 'WATCHER_SCALYR_API_KEY': SCALYR_KEY,
+ },
+ (True, True)
+ ),
+ (
+ {
+ 'WATCHER_SCALYR_API_KEY': SCALYR_KEY, 'WATCHER_SCALYR_DEST_PATH': SCALYR_DEST_PATH,
+ 'WATCHER_SCALYR_CONFIG_PATH': '/etc/config'
+ },
+ # Config path does not exist
+ (False, True)
+ ),
+ (
+ {
+ 'WATCHER_SCALYR_API_KEY': SCALYR_KEY, 'WATCHER_SCALYR_DEST_PATH': SCALYR_DEST_PATH,
+ 'WATCHER_SCALYR_CONFIG_PATH': '/etc/config'
+ },
+ # Dest path does not exist
+ (True, False)
+ ),
+ )
+)
+def test_initialization_failure(monkeypatch, env, exists):
+ patch_env(monkeypatch, env)
+ patch_os(monkeypatch)
+
+ exists = MagicMock()
+ exists.side_effect = exists
+ monkeypatch.setattr('os.path.exists', exists)
+
+ with pytest.raises(RuntimeError):
+ ScalyrAgent(CLUSTER_ID, load_template)
+
+
@pytest.mark.parametrize('env', ENVS)
def test_add_log_target(monkeypatch, env, fx_scalyr):
patch_env(monkeypatch, env)
@@ -94,10 +145,10 @@ def test_add_log_target(monkeypatch, env, fx_scalyr):
kwargs['monitor_journald'] = {} if not env.get('WATCHER_SCALYR_JOURNALD') else SCALYR_MONITOR_JOURNALD
exists = MagicMock()
- exists.side_effect = (True, True, False, False)
+ exists.side_effect = (True, True, True, False, False, True)
monkeypatch.setattr('os.path.exists', exists)
- makedirs, symlink = patch_os(monkeypatch)
+ makedirs, symlink, listdir = patch_os(monkeypatch)
current_targets = MagicMock()
current_targets.return_value = []
@@ -125,12 +176,13 @@ def test_add_log_target(monkeypatch, env, fx_scalyr):
@pytest.mark.parametrize('env', ENVS)
def test_add_log_target_no_src(monkeypatch, env, fx_scalyr):
+ patch_os(monkeypatch)
patch_env(monkeypatch, env)
target = fx_scalyr['target']
exists = MagicMock()
- exists.side_effect = (True, False)
+ exists.side_effect = (True, True, False)
monkeypatch.setattr('os.path.exists', exists)
agent = ScalyrAgent(CLUSTER_ID, load_template)
@@ -155,10 +207,10 @@ def test_add_log_target_no_change(monkeypatch, env, fx_scalyr):
kwargs['monitor_journald'] = {} if not env.get('WATCHER_SCALYR_JOURNALD') else SCALYR_MONITOR_JOURNALD
exists = MagicMock()
- exists.side_effect = (True, True, False, False)
+ exists.side_effect = (True, True, True, False, False, True)
monkeypatch.setattr('os.path.exists', exists)
- makedirs, symlink = patch_os(monkeypatch)
+ makedirs, symlink, listdir = patch_os(monkeypatch)
log_path = kwargs['logs'][0]['path']
@@ -200,10 +252,10 @@ def test_flush_failure(monkeypatch, env, fx_scalyr):
kwargs['monitor_journald'] = {} if not env.get('WATCHER_SCALYR_JOURNALD') else SCALYR_MONITOR_JOURNALD
exists = MagicMock()
- exists.side_effect = (True, True, False, False)
+ exists.side_effect = (True, True, True, False, False, True)
monkeypatch.setattr('os.path.exists', exists)
- makedirs, symlink = patch_os(monkeypatch)
+ makedirs, symlink, listdir = patch_os(monkeypatch)
log_path = kwargs['logs'][0]['path']
@@ -250,10 +302,10 @@ def test_get_current_log_paths(monkeypatch, env, config, result):
monkeypatch.setattr('json.load', load)
exists = MagicMock()
- exists.side_effect = (True, True, False, False)
+ exists.side_effect = (True, True, True, False, False, True)
monkeypatch.setattr('os.path.exists', exists)
- makedirs, symlink = patch_os(monkeypatch)
+ makedirs, symlink, listdir = patch_os(monkeypatch)
agent = ScalyrAgent(CLUSTER_ID, load_template)
@@ -280,10 +332,11 @@ def test_get_current_log_paths(monkeypatch, env, config, result):
)
)
def test_remove_log_target(monkeypatch, env, exc):
+ patch_os(monkeypatch)
patch_env(monkeypatch, env)
exists = MagicMock()
- exists.side_effect = (True, True, False, False)
+ exists.side_effect = (True, True, True, False, False, True)
monkeypatch.setattr('os.path.exists', exists)
rmtree = MagicMock()
@@ -328,6 +381,8 @@ def test_remove_log_target(monkeypatch, env, exc):
'monitors': [
{
'module': 'scalyr_agent.builtin_monitors.journald_monitor',
+ 'monitor_log_write_rate': 10000,
+ 'monitor_log_max_write_burst': 200000,
}
]
},
@@ -335,7 +390,7 @@ def test_remove_log_target(monkeypatch, env, exc):
(
{
'scalyr_key': SCALYR_KEY, 'cluster_id': CLUSTER_ID,
- 'logs': [{'path': '/p1', 'attributes': {'a1': 'v1'}}],
+ 'logs': [{'path': '/p1', 'attributes': {'a1': 'v1'}, 'copy_from_start': True}],
'monitor_journald': {
'journal_path': '/var/log/journal',
'attributes': {'cluster': CLUSTER_ID, 'node': NODE},
@@ -347,10 +402,12 @@ def test_remove_log_target(monkeypatch, env, exc):
'implicit_metric_monitor': False,
'implicit_agent_process_metrics_monitor': False,
'server_attributes': {'serverHost': 'kube-cluster'},
- 'logs': [{'attributes': {'a1': 'v1', 'parser': 'json'}, 'path': '/p1'}],
+ 'logs': [{'attributes': {'a1': 'v1', 'parser': 'json'}, 'path': '/p1', 'copy_from_start': True}],
'monitors': [
{
'module': 'scalyr_agent.builtin_monitors.journald_monitor',
+ 'monitor_log_write_rate': 10000,
+ 'monitor_log_max_write_burst': 200000,
'journal_path': '/var/log/journal',
'attributes': {'cluster': CLUSTER_ID, 'node': NODE},
'extra_fields': {'_COMM': 'command'}
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 5
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest_cov",
"mock==2.0.0"
],
"pre_install": [
"apt-get update",
"apt-get install -y python3-dev libffi-dev libssl-dev"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==2.8
-e git+https://github.com/zalando-incubator/kubernetes-log-watcher.git@7b9c8c63c94785b04f9e4c6509d6cfeb6876763c#egg=kubernetes_log_watcher
MarkupSafe==2.0.1
mock==2.0.0
packaging==21.3
pbr==6.1.1
pluggy==1.0.0
py==1.11.0
pykube==0.13.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
PyYAML==6.0.1
requests==2.27.1
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: kubernetes-log-watcher
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==2.8
- markupsafe==2.0.1
- mock==2.0.0
- packaging==21.3
- pbr==6.1.1
- pluggy==1.0.0
- py==1.11.0
- pykube==0.13.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pyyaml==6.0.1
- requests==2.27.1
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/kubernetes-log-watcher
| [
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_add_log_target[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_add_log_target_no_change[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_flush_failure[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_tpl_render[kwargs1-expected1]",
"tests/test_scalyr.py::test_tpl_render[kwargs2-expected2]"
]
| []
| [
"tests/test_scalyr.py::test_initialization_failure[env0-exists0]",
"tests/test_scalyr.py::test_initialization_failure[env1-exists1]",
"tests/test_scalyr.py::test_initialization_failure[env2-exists2]",
"tests/test_scalyr.py::test_initialization_failure[env3-exists3]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env0]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env1]",
"tests/test_scalyr.py::test_add_log_target_no_src[fx_scalyr0-env2]",
"tests/test_scalyr.py::test_get_current_log_paths[env0-config0-result0]",
"tests/test_scalyr.py::test_get_current_log_paths[env1-Exception-result1]",
"tests/test_scalyr.py::test_remove_log_target[env0-None]",
"tests/test_scalyr.py::test_remove_log_target[env1-Exception]",
"tests/test_scalyr.py::test_tpl_render[kwargs0-expected0]"
]
| []
| MIT License | 1,123 | [
"README.rst",
"kube_log_watcher/templates/scalyr.json.jinja2",
"kube_log_watcher/agents/scalyr.py",
"kube_log_watcher/kube.py",
"requirements.txt"
]
| [
"README.rst",
"kube_log_watcher/templates/scalyr.json.jinja2",
"kube_log_watcher/agents/scalyr.py",
"kube_log_watcher/kube.py",
"requirements.txt"
]
|
ARMmbed__yotta-802 | ae1cda2082f6f82c1c9f80f6194fcae62d228bc1 | 2017-03-28 19:14:56 | ae1cda2082f6f82c1c9f80f6194fcae62d228bc1 | diff --git a/docs/reference/buildsystem.md b/docs/reference/buildsystem.md
index e728c9f..6cc8bd9 100644
--- a/docs/reference/buildsystem.md
+++ b/docs/reference/buildsystem.md
@@ -30,7 +30,7 @@ The name of the library being built by the current module is available as
No header needs to be included for this definition to be available.
Use the [preprocessor stringification
-trick](https://gcc.gnu.org/onlinedocs/cpp/Stringizing.html) to get the
+trick](https://gcc.gnu.org/onlinedocs/cpp/Stringification.html) to get the
module name as a string, if desired. Note that this definition is **not**
currently available when compiling tests, and there are other circumstances
where using custom CMake can make it unavailable.
diff --git a/docs/reference/commands.md b/docs/reference/commands.md
index 7269eca..30ee1bf 100755
--- a/docs/reference/commands.md
+++ b/docs/reference/commands.md
@@ -534,7 +534,7 @@ example:
## <a href="#yotta-uninstall" name="yotta-uninstall">#</a> yotta uninstall
-Synonyms: `yotta unlink`, `yotta rm`
+Synonyms: `yotta unlink`, `yotta rm`, `yotta un`
#### Synopsis
```
diff --git a/docs/reference/config.md b/docs/reference/config.md
index d56a2f5..43be6bb 100644
--- a/docs/reference/config.md
+++ b/docs/reference/config.md
@@ -267,7 +267,7 @@ definitions will be produced:
Note that string values are not quoted. If you want a quoted string,
either embed escaped quotes (`\"`) in the string value, or use the preprocessor
[stringification
-trick](https://gcc.gnu.org/onlinedocs/cpp/Stringizing.html).
+trick](https://gcc.gnu.org/onlinedocs/cpp/Stringification.html).
JSON boolean values are converted to 1 or 0, and `null` values are converted to `NULL`.
diff --git a/docs/reference/module.md b/docs/reference/module.md
index 4bc38c5..57498c1 100755
--- a/docs/reference/module.md
+++ b/docs/reference/module.md
@@ -198,6 +198,10 @@ To specify a dependency on a github module, use one of the following forms:
Uses the latest committed version on the specified branch.
+ * `"usefulmodule": "username/repositoryname#commit-id"`
+
+ Uses the specified commit ID.
+
#### Depending on git Modules
To specify a module available from a non-Github git server as a dependency, use
a git URL:
@@ -206,8 +210,9 @@ a git URL:
* `"usefulmodule": "git+ssh://somwhere.com/anything/anywhere#<version specification>"`
* `"usefulmodule": "git+ssh://somwhere.com/anything/anywhere#<branch name>"`
* `"usefulmodule": "git+ssh://somwhere.com/anything/anywhere#<tag name>"`
+ * `"usefulmodule": "git+ssh://somwhere.com/anything/anywhere#<commit id>"`
* `"usefulmodule": "<anything>://somwhere.git"`
- * `"usefulmodule": "<anything>://somwhere.git#<version spec, tag, or branch name>"`
+ * `"usefulmodule": "<anything>://somwhere.git#<version spec, tag, branch name or commit id>"`
#### Depending on hg Modules
To specify a module available from a mercurial server as a dependency, use
diff --git a/docs/tutorial/privaterepos.md b/docs/tutorial/privaterepos.md
index 471d2de..630ad9a 100644
--- a/docs/tutorial/privaterepos.md
+++ b/docs/tutorial/privaterepos.md
@@ -24,18 +24,33 @@ Sometimes it may not be appropriate publish a module to the public package regis
The shorthand GitHub URL is formed of two parts: `<username>/<reponame>` where `<username>` is the GitHub user or organisation name of the repository owner and `<reponame>` is the name of the repositiry. e.g. the `yotta` repositry can be found at `ARMmbed/yotta`.
-You can specify a particular branch or tag to use by providing it in the URL. The supported GitHub URL formats are:
+You can specify a particular branch, tag or commit to use by providing it in the URL. The supported GitHub URL formats are:
```
username/reponame
username/reponame#<versionspec>
username/reponame#<branchname>
username/reponame#<tagname>
+username/reponame#<commit>
+https://github.com/username/reponame
+https://github.com/username/reponame#<branchname>
+https://github.com/username/reponame#<tagname>
+https://github.com/username/reponame#<commit>
```
+If the GitHub repository is public, the dependency will simply be downloaded. If the GitHub repository is private and this is the first time you are downloading from a private GitHub repository, you will be prompted to log in to GitHub using a URL.
+
+If you have a private GitHub repository and you would prefer to download it using SSH keys, you can use the following dependency form:
+
+```
[email protected]:username/reponame.git
[email protected]:username/reponame.git#<branchname>
[email protected]:username/reponame.git#<tagname>
[email protected]:username/reponame.git#<commit>
+```
###Other ways to depend on private repositories
-Using shorthand GitHub URLs is the easiest and reccomneded method of working with private repositories, however as not all projects are hosted on GitHub, `yotta` supports using git and hg URLs directly as well.
+Using shorthand GitHub URLs is the easiest and recommended method of working with private repositories, however as not all projects are hosted on GitHub, `yotta` supports using git and hg URLs directly as well.
For example, to include a privately hosted git repository from example.com:
@@ -47,13 +62,13 @@ For example, to include a privately hosted git repository from example.com:
...
```
-Git URLs support branch, version and tags specifications:
+Git URLs support branch, version, tag and commit specifications:
```
git+ssh://example.com/path/to/repo
-git+ssh://example.com/path/to/repo#<versionspec, branch or tag>
+git+ssh://example.com/path/to/repo#<versionspec, branch, tag or commit>
anything://example.com/path/to/repo.git
-anything://example.com/path/to/repo.git#<versionspec, branch or tag>
+anything://example.com/path/to/repo.git#<versionspec, branch, tag or commit>
```
Currently, mercurial URLs only support a version specification:
diff --git a/yotta/install.py b/yotta/install.py
index 28e2816..1ed7d8a 100644
--- a/yotta/install.py
+++ b/yotta/install.py
@@ -167,7 +167,12 @@ def installComponentAsDependency(args, current_component):
# (if it is not already present), and write that back to disk. Without
# writing to disk the dependency wouldn't be usable.
if installed and not current_component.hasDependency(component_name):
- saved_spec = current_component.saveDependency(installed)
+ vs = sourceparse.parseSourceURL(component_spec)
+ if vs.source_type == 'registry':
+ saved_spec = current_component.saveDependency(installed)
+ else:
+ saved_spec = current_component.saveDependency(installed, component_spec)
+
current_component.writeDescription()
logging.info('dependency %s: %s written to module.json', component_name, saved_spec)
else:
diff --git a/yotta/lib/access.py b/yotta/lib/access.py
index ae8fcac..51dec3b 100644
--- a/yotta/lib/access.py
+++ b/yotta/lib/access.py
@@ -147,8 +147,14 @@ def latestSuitableVersion(name, version_required, registry='modules', quiet=Fals
)
if v:
return v
+
+ # we have passed a specific commit ID:
+ v = remote_component.commitVersion()
+ if v:
+ return v
+
raise access_common.Unavailable(
- 'Github repository "%s" does not have any tags or branches matching "%s"' % (
+ 'Github repository "%s" does not have any tags, branches or commits matching "%s"' % (
version_required, remote_component.tagOrBranchSpec()
)
)
@@ -189,8 +195,14 @@ def latestSuitableVersion(name, version_required, registry='modules', quiet=Fals
)
if v:
return v
+
+ # we have passed a specific commit ID:
+ v = local_clone.commitVersion(remote_component.tagOrBranchSpec())
+ if v:
+ return v
+
raise access_common.Unavailable(
- '%s repository "%s" does not have any tags or branches matching "%s"' % (
+ '%s repository "%s" does not have any tags, branches or commits matching "%s"' % (
clone_type, version_required, spec
)
)
diff --git a/yotta/lib/git_access.py b/yotta/lib/git_access.py
index f53e19f..4b2bbb1 100644
--- a/yotta/lib/git_access.py
+++ b/yotta/lib/git_access.py
@@ -73,8 +73,18 @@ class GitWorkingCopy(object):
def tipVersion(self):
- return GitCloneVersion('', '', self)
+ raise NotImplementedError
+ def commitVersion(self, spec):
+ ''' return a GithubComponentVersion object for a specific commit if valid
+ '''
+ import re
+
+ commit_match = re.match('^[a-f0-9]{7,40}$', spec, re.I)
+ if commit_match:
+ return GitCloneVersion('', spec, self)
+
+ return None
class GitComponent(access_common.RemoteComponent):
def __init__(self, url, tag_or_branch=None, semantic_spec=None):
diff --git a/yotta/lib/github_access.py b/yotta/lib/github_access.py
index e2d47a1..b1f293f 100644
--- a/yotta/lib/github_access.py
+++ b/yotta/lib/github_access.py
@@ -141,6 +141,12 @@ def _getTipArchiveURL(repo):
repo = g.get_repo(repo)
return repo.get_archive_link('tarball')
+@_handleAuth
+def _getCommitArchiveURL(repo, commit):
+ ''' return a string containing a tarball url '''
+ g = Github(settings.getProperty('github', 'authtoken'))
+ repo = g.get_repo(repo)
+ return repo.get_archive_link('tarball', commit)
@_handleAuth
def _getTarball(url, into_directory, cache_key, origin_info=None):
@@ -283,6 +289,19 @@ class GithubComponent(access_common.RemoteComponent):
'', '', _getTipArchiveURL(self.repo), self.name, cache_key=None
)
+ def commitVersion(self):
+ ''' return a GithubComponentVersion object for a specific commit if valid
+ '''
+ import re
+
+ commit_match = re.match('^[a-f0-9]{7,40}$', self.tagOrBranchSpec(), re.I)
+ if commit_match:
+ return GithubComponentVersion(
+ '', '', _getCommitArchiveURL(self.repo, self.tagOrBranchSpec()), self.name, cache_key=None
+ )
+
+ return None
+
@classmethod
def remoteType(cls):
return 'github'
diff --git a/yotta/lib/sourceparse.py b/yotta/lib/sourceparse.py
index 1b1176e..0f451ad 100644
--- a/yotta/lib/sourceparse.py
+++ b/yotta/lib/sourceparse.py
@@ -51,39 +51,55 @@ class VersionSource(object):
return self.semantic_spec.match(v)
-def _splitFragment(url):
- parsed = urlsplit(url)
- if '#' in url:
- return url[:url.index('#')], parsed.fragment
- else:
- return url, None
-
-def _getGithubRef(source_url):
+def _getNonRegistryRef(source_url):
import re
+
# something/something#spec = github
- defragmented, fragment = _splitFragment(source_url)
- github_match = re.match('^[a-z0-9_-]+/([a-z0-9_-]+)$', defragmented, re.I)
+ # something/something@spec = github
+ # something/something spec = github
+ github_match = re.match('^([.a-z0-9_-]+/([.a-z0-9_-]+)) *[@#]?([.a-z0-9_\-\*\^\~\>\<\=]*)$', source_url, re.I)
if github_match:
- return github_match.group(1), VersionSource('github', defragmented, fragment)
+ return github_match.group(2), VersionSource('github', github_match.group(1), github_match.group(3))
- # something/something@spec = github
- alternate_github_match = re.match('([a-z0-9_-]+/([a-z0-9_-]+)) *@?([~^><=.0-9a-z\*-]*)$', source_url, re.I)
- if alternate_github_match:
- return alternate_github_match.group(2), VersionSource('github', alternate_github_match.group(1), alternate_github_match.group(3))
+ parsed = urlsplit(source_url)
+
+ # github
+ if parsed.netloc.endswith('github.com'):
+ # any URL onto github should be fetched over the github API, even if it
+ # would parse as a valid git URL
+ name_match = re.match('^/([.a-z0-9_-]+/([.a-z0-9_-]+?))(.git)?$', parsed.path, re.I)
+ if name_match:
+ return name_match.group(2), VersionSource('github', name_match.group(1), parsed.fragment)
+
+ if '#' in source_url:
+ without_fragment = source_url[:source_url.index('#')]
+ else:
+ without_fragment = source_url
+
+ # git
+ if parsed.scheme.startswith('git+') or parsed.path.endswith('.git'):
+ # git+anything://anything or anything.git is a git repo:
+ name_match = re.match('^.*?([.a-z0-9_-]+?)(.git)?$', parsed.path, re.I)
+ if name_match:
+ return name_match.group(1), VersionSource('git', without_fragment, parsed.fragment)
+
+ # mercurial
+ if parsed.scheme.startswith('hg+') or parsed.path.endswith('.hg'):
+ # hg+anything://anything or anything.hg is a hg repo:
+ name_match = re.match('^.*?([.a-z0-9_-]+?)(.hg)?$', parsed.path, re.I)
+ if name_match:
+ return name_match.group(1), VersionSource('hg', without_fragment, parsed.fragment)
return None, None
+
def parseSourceURL(source_url):
''' Parse the specified version source URL (or version spec), and return an
instance of VersionSource
'''
- import re
- parsed = urlsplit(source_url)
-
- if '#' in source_url:
- without_fragment = source_url[:source_url.index('#')]
- else:
- without_fragment = source_url
+ name, spec = _getNonRegistryRef(source_url)
+ if spec:
+ return spec
try:
url_is_spec = version.Spec(source_url)
@@ -94,22 +110,6 @@ def parseSourceURL(source_url):
# if the url is an unadorned version specification (including an empty
# string) then the source is the module registry:
return VersionSource('registry', '', source_url)
- elif parsed.netloc.endswith('github.com'):
- # any URL onto github should be fetched over the github API, even if it
- # would parse as a valid git URL
- return VersionSource('github', parsed.path, parsed.fragment)
- elif parsed.scheme.startswith('git+') or parsed.path.endswith('.git'):
- # git+anything://anything or anything.git is a git repo:
- return VersionSource('git', without_fragment, parsed.fragment)
- elif parsed.scheme.startswith('hg+') or parsed.path.endswith('.hg'):
- # hg+anything://anything or anything.hg is a hg repo:
- return VersionSource('hg', without_fragment, parsed.fragment)
-
- # something/something@spec = github
- # something/something#spec = github
- module_name, github_match = _getGithubRef(source_url)
- if github_match:
- return github_match
raise InvalidVersionSpec("Invalid version specification: \"%s\"" % (source_url))
@@ -143,8 +143,8 @@ def parseTargetNameAndSpec(target_name_and_spec):
import re
# fist check if this is a raw github specification that we can get the
# target name from:
- name, spec = _getGithubRef(target_name_and_spec)
- if name and spec:
+ name, spec = _getNonRegistryRef(target_name_and_spec)
+ if name:
return name, target_name_and_spec
# next split at the first @ or , if any
@@ -178,8 +178,8 @@ def parseModuleNameAndSpec(module_name_and_spec):
import re
# fist check if this is a raw github specification that we can get the
# module name from:
- name, spec = _getGithubRef(module_name_and_spec)
- if name and spec:
+ name, spec = _getNonRegistryRef(module_name_and_spec)
+ if name:
return name, module_name_and_spec
# next split at the first @, if any
diff --git a/yotta/link.py b/yotta/link.py
index d13263d..8275e4f 100644
--- a/yotta/link.py
+++ b/yotta/link.py
@@ -11,9 +11,6 @@ def addOptions(parser):
)
def tryLink(src, dst):
- # standard library modules, , ,
- import logging
-
# fsutils, , misc filesystem utils, internal
from yotta.lib import fsutils
try:
diff --git a/yotta/link_target.py b/yotta/link_target.py
index e67de6a..0ad10dd 100644
--- a/yotta/link_target.py
+++ b/yotta/link_target.py
@@ -11,9 +11,6 @@ def addOptions(parser):
)
def tryLink(src, dst):
- # standard library modules, , ,
- import logging
-
# fsutils, , misc filesystem utils, internal
from yotta.lib import fsutils
try:
diff --git a/yotta/main.py b/yotta/main.py
index c18cd72..12f6839 100644
--- a/yotta/main.py
+++ b/yotta/main.py
@@ -201,6 +201,7 @@ def main():
short_commands = {
'up':subparser.choices['update'],
'in':subparser.choices['install'],
+ 'un':subparser.choices['uninstall'],
'ln':subparser.choices['link'],
'v':subparser.choices['version'],
'ls':subparser.choices['list'],
| Install yotta modules from github with git credentials
I'd like to do following but it fails:
```
$ yotta install [email protected]:ARMmbed/module-x.git
info: get versions for git
Fatal Exception, yotta=0.17.2
Traceback (most recent call last):
File "/home/jaakor01/workspace/yotta_issue/venv/bin/yotta", line 4, in <module>
yotta.main()
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/main.py", line 61, in wrapped
return fn(*args, **kwargs)
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/main.py", line 46, in wrapped
return fn(*args, **kwargs)
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/main.py", line 243, in main
status = args.command(args, following_args)
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/install.py", line 62, in execCommand
return installComponent(args)
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/install.py", line 202, in installComponent
working_directory = path
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/access.py", line 385, in satisfyVersion
name, version_required, working_directory, type=type, inherit_shrinkwrap = inherit_shrinkwrap
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/access.py", line 314, in satisfyVersionByInstalling
v = latestSuitableVersion(name, version_required, _registryNamespaceForType(type))
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/access.py", line 159, in latestSuitableVersion
local_clone = remote_component.clone()
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/git_access.py", line 114, in clone
clone = vcs.Git.cloneToTemporaryDir(self.url)
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/vcs.py", line 62, in cloneToTemporaryDir
return cls.cloneToDirectory(remote, tempfile.mkdtemp())
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/vcs.py", line 69, in cloneToDirectory
cls._execCommands(commands)
File "/home/jaakor01/workspace/yotta_issue/venv/local/lib/python2.7/site-packages/yotta/lib/vcs.py", line 146, in _execCommands
raise VCSError("command failed: %s" % (err or out), returncode=returncode, command=cmd)
yotta.lib.vcs.VCSError: command failed: Cloning into '/tmp/tmpi_bJ_8'...
Permission denied (publickey).
fatal: Could not read from remote repository.
Please make sure you have the correct access rights
and the repository exists.
```
However, if I add the same address manually to `module.json` it works with `yotta update`.
```
"dependencies": {
"module-x":"[email protected]:ARMmbed/module-x.git#master"
},
``` | ARMmbed/yotta | diff --git a/yotta/test/test_sourceparse.py b/yotta/test/test_sourceparse.py
index 412f4ad..0b7af6f 100644
--- a/yotta/test/test_sourceparse.py
+++ b/yotta/test/test_sourceparse.py
@@ -11,50 +11,60 @@ import unittest
# sourceparse, , parse version source urls, internal
from yotta.lib import sourceparse
-
-Registry_URLs = [
- '',
- '*',
- '1.2.3',
- '>=1.2.3',
- '^0.1.2',
- '~0.1.2',
+# Shorthand URLs for GitHub
+ShortHand_URLs = [
+ 'username/reponame',
]
+# Longhand URLs for GitHub
Github_URLs = [
- 'username/reponame',
- 'username/reponame#1.2.3',
- 'username/reponame#^1.2.3',
- 'username/reponame#-1.2.3',
- 'username/reponame#branch-or-tag-name',
- 'username/[email protected]',
- 'username/reponame@^1.2.3',
- 'username/[email protected]',
- 'username/reponame@branch-or-tag-name',
+ 'https://github.com/username/reponame.git',
+ 'git://github.com/username/reponame.git',
+ 'git+http://[email protected]/username/reponame.git',
+ 'git+https://[email protected]/username/reponame.git',
]
Git_URLs = [
- 'git+ssh://somewhere.com/something/etc/etc',
- 'git+ssh://somewhere.com/something/etc/etc#1.2.3',
- 'git+ssh://somewhere.com/something/etc/etc#^1.2.3',
- 'git+ssh://somewhere.com/something/etc/etc#~1.2.3',
- 'git+ssh://somewhere.com/something/etc/etc#branch-name',
- 'ssh://somewhere.com/something/etc/etc.git',
- 'ssh://somewhere.com/something/etc/etc.git#^1.2.3',
- 'ssh://somewhere.com/something/etc/etc.git#~1.2.3',
- 'ssh://somewhere.com/something/etc/etc.git#branch-name',
- 'http://somewhere.something/something.git',
+ 'http://somewhere.something/reponame.git',
+ 'https://somewhere.something/reponame.git',
+ 'ssh://somewhere.com/something/etc/reponame.git',
+ 'git+ssh://somewhere.com/something/etc/reponame',
+ '[email protected]:username/reponame.git',
]
HG_URLs = [
- 'hg+ssh://somewhere.com/something/etc/etc',
- 'hg+ssh://somewhere.com/something/etc/etc#1.2.3',
- 'hg+ssh://somewhere.com/something/etc/etc#^1.2.3',
- 'hg+ssh://somewhere.com/something/etc/etc#~1.2.3',
- 'ssh://somewhere.com/something/etc/etc.hg',
- 'ssh://somewhere.com/something/etc/etc.hg#^1.2.3',
- 'ssh://somewhere.com/something/etc/etc.hg#~1.2.3',
- 'http://somewhere.something/something.hg',
+ 'http://somewhere.something/reponame.hg',
+ 'https://somewhere.something/reponame.hg',
+ 'ssh://somewhere.com/something/etc/reponame.hg',
+ 'hg+ssh://somewhere.com/something/etc/reponame',
+]
+
+# We support version spec, branch name, tag name and commit id for GitHub and Git
+Git_Specs = [
+ '',
+ '1.2.3',
+ '^1.2.3',
+ '~1.2.3',
+ '-1.2.3',
+ 'branch-or-tag-name',
+ 'd5f5049',
+]
+
+# We support only version spec for HG
+HG_Specs = [
+ '',
+ '1.2.3',
+ '^1.2.3',
+ '~1.2.3',
+]
+
+Registry_Specs = [
+ '',
+ '*',
+ '1.2.3',
+ '>=1.2.3',
+ '^0.1.2',
+ '~0.1.2',
]
test_invalid_urls = [
@@ -65,24 +75,56 @@ test_invalid_urls = [
class TestParseSourceURL(unittest.TestCase):
def test_registryURLs(self):
- for url in Registry_URLs:
+ for url in Registry_Specs:
sv = sourceparse.parseSourceURL(url)
self.assertEqual(sv.source_type, 'registry')
+ def test_shorthandURLs(self):
+ for url in ShortHand_URLs:
+ for s in Git_Specs:
+ if len(s):
+ # Shorthand URLs support '@' and ' ' as well as '#'
+ for m in ['#', '@', ' ']:
+ sv = sourceparse.parseSourceURL(url + m + s)
+ self.assertEqual(sv.source_type, 'github')
+ self.assertEqual(sv.spec, s)
+ else:
+ sv = sourceparse.parseSourceURL(url)
+ self.assertEqual(sv.source_type, 'github')
+ self.assertEqual(sv.spec, s)
+
def test_githubURLs(self):
for url in Github_URLs:
- sv = sourceparse.parseSourceURL(url)
- self.assertEqual(sv.source_type, 'github')
+ for s in Git_Specs:
+ if len(s):
+ source = url + '#' + s
+ else:
+ source = url
+ sv = sourceparse.parseSourceURL(source)
+ self.assertEqual(sv.source_type, 'github')
+ self.assertEqual(sv.spec, s)
def test_gitURLs(self):
for url in Git_URLs:
- sv = sourceparse.parseSourceURL(url)
- self.assertEqual(sv.source_type, 'git')
+ for s in Git_Specs:
+ if len(s):
+ source = url + '#' + s
+ else:
+ source = url
+ sv = sourceparse.parseSourceURL(source)
+ self.assertEqual(sv.source_type, 'git')
+ self.assertEqual(sv.spec, s)
def test_hgURLs(self):
for url in HG_URLs:
- sv = sourceparse.parseSourceURL(url)
- self.assertEqual(sv.source_type, 'hg')
+ for s in HG_Specs:
+ if len(s):
+ source = url + '#' + s
+ else:
+ source = url
+ sv = sourceparse.parseSourceURL(source)
+ self.assertEqual(sv.source_type, 'hg')
+ self.assertEqual(sv.spec, s)
def test_invalid(self):
for url in test_invalid_urls:
@@ -101,38 +143,63 @@ class TestParseModuleNameAndSpec(unittest.TestCase):
self.assertEqual(n, name)
self.assertEqual(s, '*')
+ def test_ShorthandRefs(self):
+ for url in ShortHand_URLs:
+ for spec in Git_Specs:
+ if len(spec):
+ # Shorthand URLs support '@' and ' ' as well as '#'
+ for m in ['#', '@', ' ']:
+ ns = url + m + spec
+ n, s = sourceparse.parseModuleNameAndSpec(ns)
+ self.assertEqual(n, 'reponame')
+ self.assertEqual(s, ns)
+ else:
+ n, s = sourceparse.parseModuleNameAndSpec(url)
+ self.assertEqual(n, 'reponame')
+ self.assertEqual(s, url)
+
def test_GithubRefs(self):
for url in Github_URLs:
- n, s = sourceparse.parseModuleNameAndSpec(url)
- self.assertEqual(n, 'reponame')
+ for spec in Git_Specs:
+ if len(spec):
+ ns = url + '#' + spec
+ else:
+ ns = url
+ n, s = sourceparse.parseModuleNameAndSpec(ns)
+ self.assertEqual(n, 'reponame')
+ self.assertEqual(s, ns)
+
+ def test_GitRefs(self):
+ for url in Git_URLs:
+ for spec in Git_Specs:
+ if len(spec):
+ ns = url + '#' + spec
+ else:
+ ns = url
+ n, s = sourceparse.parseModuleNameAndSpec(ns)
+ self.assertEqual(n, 'reponame')
+ self.assertEqual(s, ns)
+
+ def test_HGRefs(self):
+ for url in HG_URLs:
+ for spec in HG_Specs:
+ if len(spec):
+ ns = url + '#' + spec
+ else:
+ ns = url
+ n, s = sourceparse.parseModuleNameAndSpec(ns)
+ self.assertEqual(n, 'reponame')
+ self.assertEqual(s, ns)
def test_atVersion(self):
for name in Valid_Names:
- for v in Registry_URLs:
+ for v in Registry_Specs:
if len(v):
nv = name + '@' + v
n, s = sourceparse.parseModuleNameAndSpec(nv)
self.assertEqual(n, name)
self.assertEqual(s, v)
- def test_atGitURL(self):
- for name in Valid_Names:
- for v in Git_URLs:
- nv = name + '@' + v
- n, s = sourceparse.parseModuleNameAndSpec(nv)
- self.assertEqual(n, name)
- self.assertEqual(s, v)
-
- def test_atHGURL(self):
- for name in Valid_Names:
- for v in HG_URLs:
- nv = name + '@' + v
- n, s = sourceparse.parseModuleNameAndSpec(nv)
- self.assertEqual(n, name)
- self.assertEqual(s, v)
-
if __name__ == '__main__':
unittest.main()
-
-
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 13
} | 0.17 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==1.12.3
attrs==22.2.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.3.9
coverage==6.2
cryptography==40.0.2
Deprecated==1.2.18
execnet==1.9.0
future==1.0.0
hgapi==1.7.4
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
intelhex==2.3.0
intervaltree==3.1.0
Jinja2==2.11.3
jsonpointer==1.14
jsonschema==2.6.0
MarkupSafe==2.0.1
mbed-test-wrapper==1.0.0
packaging==21.3
pathlib==1.0.1
pluggy==1.0.0
project-generator==0.8.17
project-generator-definitions==0.2.46
py==1.11.0
pycparser==2.21
pyelftools==0.23
PyGithub==1.54.1
PyJWT==1.7.1
pyocd==0.15.0
pyparsing==3.1.4
pytest==7.0.1
pytest-asyncio==0.16.0
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
pyusb==1.2.1
PyYAML==3.13
requests==2.27.1
semantic-version==2.10.0
six==1.17.0
sortedcontainers==2.4.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
valinor==0.0.15
websocket-client==1.3.1
wrapt==1.16.0
xmltodict==0.14.2
-e git+https://github.com/ARMmbed/yotta.git@ae1cda2082f6f82c1c9f80f6194fcae62d228bc1#egg=yotta
zipp==3.6.0
| name: yotta
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==1.12.3
- argparse==1.4.0
- attrs==22.2.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.3.9
- coverage==6.2
- cryptography==40.0.2
- deprecated==1.2.18
- execnet==1.9.0
- future==1.0.0
- hgapi==1.7.4
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- intelhex==2.3.0
- intervaltree==3.1.0
- jinja2==2.11.3
- jsonpointer==1.14
- jsonschema==2.6.0
- markupsafe==2.0.1
- mbed-test-wrapper==1.0.0
- packaging==21.3
- pathlib==1.0.1
- pluggy==1.0.0
- project-generator==0.8.17
- project-generator-definitions==0.2.46
- py==1.11.0
- pycparser==2.21
- pyelftools==0.23
- pygithub==1.54.1
- pyjwt==1.7.1
- pyocd==0.15.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-asyncio==0.16.0
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- pyusb==1.2.1
- pyyaml==3.13
- requests==2.27.1
- semantic-version==2.10.0
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- valinor==0.0.15
- websocket-client==1.3.1
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/yotta
| [
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_shorthandURLs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_GitRefs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_GithubRefs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_HGRefs"
]
| []
| [
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_gitURLs",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_githubURLs",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_hgURLs",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_invalid",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_registryURLs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_ShorthandRefs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_atVersion",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_validNames"
]
| []
| Apache License 2.0 | 1,124 | [
"docs/reference/commands.md",
"yotta/lib/sourceparse.py",
"yotta/lib/github_access.py",
"docs/reference/config.md",
"docs/reference/module.md",
"yotta/lib/access.py",
"docs/tutorial/privaterepos.md",
"yotta/link_target.py",
"yotta/lib/git_access.py",
"yotta/main.py",
"yotta/link.py",
"docs/reference/buildsystem.md",
"yotta/install.py"
]
| [
"docs/reference/commands.md",
"yotta/lib/sourceparse.py",
"yotta/lib/github_access.py",
"docs/reference/config.md",
"docs/reference/module.md",
"yotta/lib/access.py",
"docs/tutorial/privaterepos.md",
"yotta/link_target.py",
"yotta/lib/git_access.py",
"yotta/main.py",
"yotta/link.py",
"docs/reference/buildsystem.md",
"yotta/install.py"
]
|
|
EMCECS__python-ecsclient-30 | 546e342440f7ca0274d222179c6eca790d1d6573 | 2017-03-29 10:38:47 | 40a22c4eb202e60d1c8fe6a7f925f4783b0d6a82 | coveralls:
[](https://coveralls.io/builds/10825849)
Coverage increased (+0.3%) to 55.697% when pulling **799e48d6f2d54291c624f05e5a8774b4fbcf0da3 on bug-logout** into **546e342440f7ca0274d222179c6eca790d1d6573 on master**.
| diff --git a/ecsclient/authentication.py b/ecsclient/authentication.py
index 0e8ae84..31fb0c4 100644
--- a/ecsclient/authentication.py
+++ b/ecsclient/authentication.py
@@ -23,6 +23,11 @@ class Authentication(object):
:param force: If you have multiple sessions running simultaneously this
forces the termination of all tokens to the current user
"""
+
+ if not self.conn.get_current_token():
+ log.warning('Not logging out since the client has no token set up')
+ return
+
params = {
'force': force
}
diff --git a/ecsclient/baseclient.py b/ecsclient/baseclient.py
index e9dd6cf..485238a 100644
--- a/ecsclient/baseclient.py
+++ b/ecsclient/baseclient.py
@@ -84,11 +84,20 @@ class Client(object):
"""
return self._token_request.get_new_token()
+ def get_current_token(self):
+ """
+ Get the current token in use. None if the client is logged out or not yet logged in
+ """
+ return self._token_request.token
+
def remove_cached_token(self):
"""
Remove the cached token file, this is useful if you switch users
and want to use a different token
"""
+ self.token = None
+ self._token_request.token = None
+
if os.path.isfile(self.token_path):
log.debug("Removing cached token '{0}'".format(self.token_path))
os.remove(self.token_path)
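The net effect of the two changes above is that `logout` becomes safe to call more than once: a second call finds no token and returns early with a warning. A small sketch of the intended behaviour (client construction is omitted; only names that appear in this diff and its tests are used):

```
def demo_double_logout(client):
    # client: an already authenticated ecsclient Client (construction omitted)
    response = client.authentication.logout()       # real logout request, token cleared
    assert client.get_current_token() is None        # token is also cleared locally
    assert client.authentication.logout() is None    # no token -> warning, early return
    return response
```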
| Two logouts issued when only one logout() executed.
I am using a single logout() command, but I can see two logout requests being issued by the code. The problem is that the first one succeeds (200) and the second call fails (probably because of the first 200). | EMCECS/python-ecsclient | diff --git a/tests/unit/test_authentication.py b/tests/unit/test_authentication.py
index 814018f..82de270 100644
--- a/tests/unit/test_authentication.py
+++ b/tests/unit/test_authentication.py
@@ -1,3 +1,4 @@
+import logging
import testtools
from mock import mock
from requests.auth import _basic_auth_str
@@ -12,6 +13,7 @@ class TestAuthentication(testtools.TestCase):
LOGOUT_URL = 'http://127.0.0.1:4443/logout'
def setUp(self, *args, **kwargs):
+ # logging.basicConfig(level=logging.DEBUG)
super(TestAuthentication, self).setUp(*args, **kwargs)
self.client = Client(username='someone',
password='password',
@@ -19,61 +21,105 @@ class TestAuthentication(testtools.TestCase):
token_endpoint='http://127.0.0.1:4443/login')
self.requests_mock = self.useFixture(fixture.Fixture())
- @mock.patch('ecsclient.baseclient.os.path.isfile')
- def test_get_token_valid_credentials(self, mock_isfile):
- mock_isfile.return_value = False
- self.requests_mock.register_uri('GET', self.LOGIN_URL, headers={'X-SDS-AUTH-TOKEN': 'token'})
+ def test_get_token_valid_credentials(self):
+ self.requests_mock.register_uri('GET', self.LOGIN_URL, headers={'X-SDS-AUTH-TOKEN': 'FAKE-TOKEN-123'})
self.assertIsNone(self.client.token)
- self.assertIsNone(self.client._token_request._get_existing_token())
token = self.client.get_token()
- self.assertEqual(token, 'token')
- self.assertEqual(self.client._token_request._get_existing_token(), 'token')
+ self.assertEqual(token, 'FAKE-TOKEN-123')
+ self.assertEqual(self.client._token_request.token, 'FAKE-TOKEN-123')
self.assertEqual(self.requests_mock.last_request.method, 'GET')
self.assertEqual(self.requests_mock.last_request.url, self.LOGIN_URL)
self.assertEqual(self.requests_mock.last_request.headers['authorization'],
_basic_auth_str('someone', 'password'))
- @mock.patch('ecsclient.baseclient.os.path.isfile')
- def test_get_token_invalid_credentials(self, mock_isfile):
- mock_isfile.return_value = False
+ def test_get_token_invalid_credentials(self):
self.requests_mock.register_uri('GET', self.LOGIN_URL, status_code=401, text='body')
with super(testtools.TestCase, self).assertRaises(ECSClientException) as error:
self.client.get_token()
exception = error.exception
+ self.assertIsNone(self.client._token_request.token)
self.assertEqual(exception.message, 'Invalid username or password')
self.assertEqual(exception.http_response_content, 'body')
self.assertEqual(exception.http_status, 401)
- self.assertIsNone(self.client._token_request._get_existing_token())
self.assertEqual(self.requests_mock.last_request.method, 'GET')
self.assertEqual(self.requests_mock.last_request.url, self.LOGIN_URL)
self.assertEqual(self.requests_mock.last_request.headers['authorization'],
_basic_auth_str('someone', 'password'))
- @mock.patch('ecsclient.common.token_request.TokenRequest.get_token')
- def test_logout(self, mock_get_token):
- mock_get_token.return_value = 'token'
+ @mock.patch('ecsclient.baseclient.os.remove')
+ @mock.patch('ecsclient.baseclient.os.path.isfile')
+ def test_logout(self, mock_isfile, mock_remove):
+ self.client.token = 'FAKE-TOKEN-123'
+ self.client._token_request.token = 'FAKE-TOKEN-123'
self.requests_mock.register_uri('GET', self.LOGOUT_URL, text="{'user': 'someone'}")
+ mock_isfile.return_value = True
+ mock_remove.return_value = True
resp = self.client.authentication.logout()
self.assertEqual(resp, "{'user': 'someone'}")
+ self.assertIsNone(self.client.token)
+ self.assertIsNone(self.client._token_request.token)
+ mock_isfile.assert_called_with('/tmp/ecsclient.tkn')
+ mock_remove.assert_called_with('/tmp/ecsclient.tkn')
self.assertEqual(self.requests_mock.last_request.method, 'GET')
self.assertEqual(self.requests_mock.last_request.url, self.LOGOUT_URL)
- self.assertEqual(self.requests_mock.last_request.headers['x-sds-auth-token'], 'token')
+ self.assertEqual(self.requests_mock.last_request.headers['x-sds-auth-token'], 'FAKE-TOKEN-123')
- @mock.patch('ecsclient.common.token_request.TokenRequest.get_token')
- def test_logout_force(self, mock_get_token):
- mock_get_token.return_value = 'token'
+ @mock.patch('ecsclient.baseclient.os.remove')
+ @mock.patch('ecsclient.baseclient.os.path.isfile')
+ def test_logout_force(self, mock_isfile, mock_remove):
+ self.client.token = 'FAKE-TOKEN-123'
+ self.client._token_request.token = 'FAKE-TOKEN-123'
self.requests_mock.register_uri('GET', self.LOGOUT_URL + '?force=True', text="{'user': 'someone'}")
+ mock_isfile.return_value = True
+ mock_remove.return_value = True
resp = self.client.authentication.logout(force=True)
self.assertEqual(resp, "{'user': 'someone'}")
+ self.assertIsNone(self.client.token)
+ self.assertIsNone(self.client._token_request.token)
+ mock_isfile.assert_called_with('/tmp/ecsclient.tkn')
+ mock_remove.assert_called_with('/tmp/ecsclient.tkn')
self.assertEqual(self.requests_mock.last_request.method, 'GET')
self.assertEqual(self.requests_mock.last_request.url, self.LOGOUT_URL + '?force=True')
self.assertEqual(self.requests_mock.last_request.qs['force'], ['true'])
- self.assertEqual(self.requests_mock.last_request.headers['x-sds-auth-token'], 'token')
+ self.assertEqual(self.requests_mock.last_request.headers['x-sds-auth-token'], 'FAKE-TOKEN-123')
+
+ def test_logout_when_logged_out(self):
+ self.client._token_request.token = 'FAKE-TOKEN-123'
+ self.client._token_request.cache_token = False
+ self.requests_mock.register_uri('GET', self.LOGOUT_URL, text="{'user': 'someone'}")
+ self.requests_mock.register_uri('GET', 'http://127.0.0.1:4443/user/whoami')
+
+ resp = self.client.authentication.logout()
+
+ self.assertEqual(resp, "{'user': 'someone'}")
+
+ resp2 = self.client.authentication.logout()
+
+ self.assertIsNone(resp2)
+
+ def test_logout_and_reconnect(self):
+ self.client.token = 'FAKE-TOKEN-123'
+ self.client._token_request.token = 'FAKE-TOKEN-123'
+ self.client._token_request.cache_token = False
+ self.requests_mock.register_uri('GET', self.LOGOUT_URL, text="{'user': 'someone'}")
+
+ self.client.authentication.logout()
+
+ self.assertIsNone(self.client.token)
+ self.assertIsNone(self.client._token_request.token)
+
+ self.requests_mock.register_uri('GET', self.LOGIN_URL, headers={'X-SDS-AUTH-TOKEN': 'NEW-TOKEN-123'})
+
+ self.client.get('login')
+
+ self.assertEqual(self.client._token_request.token, 'NEW-TOKEN-123')
+
+
diff --git a/tests/unit/test_ecsclient.py b/tests/unit/test_ecsclient.py
index 0d5cf14..d24ad9c 100644
--- a/tests/unit/test_ecsclient.py
+++ b/tests/unit/test_ecsclient.py
@@ -66,6 +66,7 @@ class TestEcsClient(unittest.TestCase):
password='password',
ecs_endpoint='https://192.168.1.10')
exception = error.exception.message
+ mock_isfile.assert_called_with('/tmp/ecsclient.tkn')
self.assertEqual("'token_endpoint' not provided and missing 'token'|'token_path'", str(exception))
def test_client_without_credentials(self):
@@ -112,6 +113,7 @@ class TestEcsClient(unittest.TestCase):
def test_client_init_with_token_path(self, mock_isfile):
mock_isfile.return_value = True
c = Client(version='3',
- token_path='/tmp/token.tkn',
+ token_path='/tmp/mytoken.tkn',
ecs_endpoint='https://192.168.1.10')
self.assertTrue(hasattr(c, 'token_path'))
+ mock_isfile.assert_called_with('/tmp/mytoken.tkn')
diff --git a/tests/unit/test_node.py b/tests/unit/test_node.py
index c6383c8..5c01719 100644
--- a/tests/unit/test_node.py
+++ b/tests/unit/test_node.py
@@ -45,29 +45,33 @@ class TestNode(testtools.TestCase):
self.response = MagicMock()
self.requests_mock = self.useFixture(fixture.Fixture())
- @mock.patch('ecsclient.common.token_request.TokenRequest.get_new_token')
- def test_get_nodes_should_throw_ecsclientexception(self, mock_get_new_token):
+ @mock.patch('ecsclient.common.token_request.TokenRequest.get_token')
+ def test_get_nodes_throw_exception(self, mock_get_token):
self.requests_mock.register_uri('GET', 'https://127.0.0.1:4443/vdc/nodes',
status_code=http_client.INTERNAL_SERVER_ERROR,
text='Server Error')
- mock_get_new_token.return_value = 'FAKE-TOKEN-123'
+ mock_get_token.return_value = 'FAKE-TOKEN-123'
with super(testtools.TestCase, self).assertRaises(ECSClientException) as error:
self.client.node.get_nodes()
exception = error.exception
+ self.assertEqual(self.requests_mock.last_request.method, 'GET')
+ self.assertEqual(self.requests_mock.last_request.url, 'https://127.0.0.1:4443/vdc/nodes')
+ self.assertEqual(self.requests_mock.last_request.headers['x-sds-auth-token'], 'FAKE-TOKEN-123')
self.assertEqual(exception.http_response_content, 'Server Error')
self.assertEqual(exception.http_status, http_client.INTERNAL_SERVER_ERROR)
- def test_get_nodes(self):
- self.response.status_code = http_client.OK
- self.response.body = self.returned_json
- self.response.json = MagicMock(return_value=self.returned_json)
- self.requests = MagicMock(return_value=self.response)
- self.requests.get.side_effect = [self.response]
+ @mock.patch('ecsclient.common.token_request.TokenRequest.get_token')
+ def test_get_nodes(self, mock_get_token):
+ mock_get_token.return_value = 'FAKE-TOKEN-123'
+ self.requests_mock.register_uri('GET', 'https://127.0.0.1:4443/vdc/nodes',
+ status_code=http_client.OK,
+ json=self.returned_json)
+
+ response = self.client.node.get_nodes()
- with patch('ecsclient.common.token_request.TokenRequest.'
- '_get_existing_token', return_value='FAKE-TOKEN-123'):
- with patch('ecsclient.baseclient.requests.Session.get', self.requests):
- returned_json = self.client.node.get_nodes()
- self.assertEqual(returned_json, self.returned_json)
+ self.assertEqual(self.requests_mock.last_request.method, 'GET')
+ self.assertEqual(self.requests_mock.last_request.url, 'https://127.0.0.1:4443/vdc/nodes')
+ self.assertEqual(self.requests_mock.last_request.headers['x-sds-auth-token'], 'FAKE-TOKEN-123')
+ self.assertEqual(response, self.returned_json)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 2
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"flake8==3.2.1",
"mock==2.0.0",
"nose==1.3.7",
"coverage==4.3.4",
"jsonschema==2.6.0",
"tox==2.6.0",
"testtools==2.2.0",
"requests-mock[fixture]==1.3.0",
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==4.3.4
distlib==0.3.9
extras==1.0.0
filelock==3.4.1
fixtures==4.0.1
flake8==3.2.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
jsonschema==2.6.0
linecache2==1.0.0
mccabe==0.5.3
mock==2.0.0
nose==1.3.7
packaging==21.3
pbr==6.1.1
platformdirs==2.4.0
pluggy==0.13.1
py==1.11.0
pycodestyle==2.2.0
pyflakes==1.3.0
pyparsing==3.1.4
pytest==7.0.1
-e git+https://github.com/EMCECS/python-ecsclient.git@546e342440f7ca0274d222179c6eca790d1d6573#egg=python_ecsclient
python-mimeparse==1.6.0
requests==2.9.1
requests-mock==1.3.0
six==1.10.0
testtools==2.2.0
tomli==1.2.3
tox==2.6.0
traceback2==1.4.0
typing_extensions==4.1.1
unittest2==1.1.0
virtualenv==20.17.1
zipp==3.6.0
| name: python-ecsclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==22.2.0
- coverage==4.3.4
- distlib==0.3.9
- extras==1.0.0
- filelock==3.4.1
- fixtures==4.0.1
- flake8==3.2.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jsonschema==2.6.0
- linecache2==1.0.0
- mccabe==0.5.3
- mock==2.0.0
- nose==1.3.7
- packaging==21.3
- pbr==6.1.1
- platformdirs==2.4.0
- pluggy==0.13.1
- py==1.11.0
- pycodestyle==2.2.0
- pyflakes==1.3.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-mimeparse==1.6.0
- requests==2.9.1
- requests-mock==1.3.0
- six==1.10.0
- testtools==2.2.0
- tomli==1.2.3
- tox==2.6.0
- traceback2==1.4.0
- typing-extensions==4.1.1
- unittest2==1.1.0
- virtualenv==20.17.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-ecsclient
| [
"tests/unit/test_authentication.py::TestAuthentication::test_logout",
"tests/unit/test_authentication.py::TestAuthentication::test_logout_and_reconnect",
"tests/unit/test_authentication.py::TestAuthentication::test_logout_force",
"tests/unit/test_authentication.py::TestAuthentication::test_logout_when_logged_out"
]
| []
| [
"tests/unit/test_authentication.py::TestAuthentication::test_get_token_invalid_credentials",
"tests/unit/test_authentication.py::TestAuthentication::test_get_token_valid_credentials",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_init_with_credentials",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_init_with_token",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_init_with_token_path",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_unsupported_version",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_v2_class",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_v3_class",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_without_credentials",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_without_ecs_endpoint",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_without_token_endpoint",
"tests/unit/test_ecsclient.py::TestEcsClient::test_client_without_version",
"tests/unit/test_ecsclient.py::TestEcsClient::test_verify_attributes",
"tests/unit/test_node.py::TestNode::test_get_nodes",
"tests/unit/test_node.py::TestNode::test_get_nodes_throw_exception"
]
| []
| Apache License 2.0 | 1,125 | [
"ecsclient/baseclient.py",
"ecsclient/authentication.py"
]
| [
"ecsclient/baseclient.py",
"ecsclient/authentication.py"
]
|
theskumar__python-dotenv-52 | 9552db8d8c25753ec4f1a724f64d895b9daa6296 | 2017-03-30 09:50:38 | 9552db8d8c25753ec4f1a724f64d895b9daa6296 | coveralls:
[](https://coveralls.io/builds/10844638)
Coverage remained the same at 82.418% when pulling **a2096aa6fc7425ed5c8d9280cc3acd05a6fa5eab on Flimm:naive-single-quotes** into **9552db8d8c25753ec4f1a724f64d895b9daa6296 on theskumar:master**.
theskumar: @Flimm At first look it does not seem to fix the issue where the parser keeps the `'` quote around the value when reading the `.env` file. Thoughts?
Flimm: Sorry, you're right, I forgot to push a third commit. 😳 Have a look now.
coveralls:
[](https://coveralls.io/builds/10845005)
Coverage remained the same at 82.418% when pulling **9b5582ea85c99f704bce9012c2741d28b46981ed on Flimm:naive-single-quotes** into **9552db8d8c25753ec4f1a724f64d895b9daa6296 on theskumar:master**.
theskumar: This looks pretty good. Love the tests 😍! | diff --git a/README.rst b/README.rst
index 936a5a2..8b2a039 100644
--- a/README.rst
+++ b/README.rst
@@ -126,7 +126,8 @@ update your settings on remote server, handy isn't it!
file in current working directory.
-q, --quote [always|never|auto]
Whether to quote or not the variable values.
- Default mode is always.
+ Default mode is always. This does not affect
+ parsing.
--help Show this message and exit.
Commands:
diff --git a/dotenv/cli.py b/dotenv/cli.py
index 9a99314..125a0a8 100644
--- a/dotenv/cli.py
+++ b/dotenv/cli.py
@@ -11,7 +11,7 @@ from .main import get_key, dotenv_values, set_key, unset_key
help="Location of the .env file, defaults to .env file in current working directory.")
@click.option('-q', '--quote', default='always',
type=click.Choice(['always', 'never', 'auto']),
- help="Whether to quote or not the variable values. Default mode is always.")
+ help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.")
@click.pass_context
def cli(ctx, file, quote):
'''This script is used to set, get or unset values from a .env file.'''
diff --git a/dotenv/main.py b/dotenv/main.py
index 2fe1a83..ceac3fa 100644
--- a/dotenv/main.py
+++ b/dotenv/main.py
@@ -103,7 +103,7 @@ def parse_dotenv(dotenv_path):
k, v = k.strip(), v.strip()
if len(v) > 0:
- quoted = v[0] == v[len(v) - 1] == '"'
+ quoted = v[0] == v[len(v) - 1] in ['"', "'"]
if quoted:
v = decode_escaped(v[1:-1])
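One subtlety in the new `quoted` check above: Python chains the comparison, so it requires both that the first and last characters are equal and that this shared character is a quote. A quick standalone illustration:

```
for v in ['"two words"', "'two words'", '"unterminated']:
    # chained comparison: (v[0] == v[-1]) and (v[-1] in ['"', "'"])
    quoted = v[0] == v[len(v) - 1] in ['"', "'"]
    print(v, quoted)   # -> True, True, False
```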
| Wrong parsing of env variables in single quotes
I have the following `.env` file:
```
DATABASE_URL='postgres://localhost:5432/myapp_development'
```
When I run `dotenv get DATABASE_URL` this is what I get:
`DATABASE_URL="'postgres://localhost:5432/simulator_development'"`
When I try to use this with [dj-database-url](https://github.com/kennethreitz/dj-database-url) it is failing to parse the `DATABASE_URL` environment variable as it is.
It seems using single quotes in the `.env` file is causing this.
It would be nice if this were documented somewhere, if this behavior is intended.
I spent quite a bit of time trying to figure out where the error was.
Thanks 😃
| theskumar/python-dotenv | diff --git a/tests/test_cli.py b/tests/test_cli.py
index d78172b..449b54a 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -46,6 +46,18 @@ def test_key_value_without_quotes():
sh.rm(dotenv_path)
+def test_value_with_quotes():
+ with open(dotenv_path, 'w') as f:
+ f.write('TEST="two words"\n')
+ assert dotenv.get_key(dotenv_path, 'TEST') == 'two words'
+ sh.rm(dotenv_path)
+
+ with open(dotenv_path, 'w') as f:
+ f.write("TEST='two words'\n")
+ assert dotenv.get_key(dotenv_path, 'TEST') == 'two words'
+ sh.rm(dotenv_path)
+
+
def test_unset():
sh.touch(dotenv_path)
success, key_to_set, value_to_set = dotenv.set_key(dotenv_path, 'HELLO', 'WORLD')
@@ -104,6 +116,13 @@ def test_get_key_with_interpolation(cli):
dotenv.set_key(dotenv_path, 'FOO', '${HELLO}')
dotenv.set_key(dotenv_path, 'BAR', 'CONCATENATED_${HELLO}_POSIX_VAR')
+ lines = list(open(dotenv_path, "r").readlines())
+ assert lines == [
+ 'HELLO="WORLD"\n',
+ 'FOO="${HELLO}"\n',
+ 'BAR="CONCATENATED_${HELLO}_POSIX_VAR"\n',
+ ]
+
# test replace from variable in file
stored_value = dotenv.get_key(dotenv_path, 'FOO')
assert stored_value == 'WORLD'
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 3
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
bump2version==1.0.1
bumpversion==0.6.0
certifi==2021.5.30
click==8.0.4
coverage==6.2
flake8==5.0.4
importlib-metadata==4.2.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-flake8==1.1.1
-e git+https://github.com/theskumar/python-dotenv.git@9552db8d8c25753ec4f1a724f64d895b9daa6296#egg=python_dotenv
sh==1.14.3
tomli==1.2.3
typing_extensions==4.1.1
zipp==3.6.0
| name: python-dotenv
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- bump2version==1.0.1
- bumpversion==0.6.0
- click==8.0.4
- coverage==6.2
- flake8==5.0.4
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-flake8==1.1.1
- sh==1.14.3
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/python-dotenv
| [
"tests/test_cli.py::test_value_with_quotes"
]
| [
"tests/test_cli.py::test_list_wo_file"
]
| [
"tests/test_cli.py::test_get_key",
"tests/test_cli.py::test_list",
"tests/test_cli.py::test_key_value_without_quotes",
"tests/test_cli.py::test_unset",
"tests/test_cli.py::test_console_script",
"tests/test_cli.py::test_default_path",
"tests/test_cli.py::test_get_key_with_interpolation",
"tests/test_cli.py::test_get_key_with_interpolation_of_unset_variable"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,127 | [
"README.rst",
"dotenv/main.py",
"dotenv/cli.py"
]
| [
"README.rst",
"dotenv/main.py",
"dotenv/cli.py"
]
|
Stranger6667__postmarker-125 | 604005f5a384995207ffb53e6de2b8f3fa2b38db | 2017-03-30 11:51:48 | 1dba208c24b2e31c5deb733a4da19196bc7f04d5 | codecov[bot]: # [Codecov](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=h1) Report
> Merging [#125](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=desc) into [master](https://codecov.io/gh/Stranger6667/postmarker/commit/604005f5a384995207ffb53e6de2b8f3fa2b38db?src=pr&el=desc) will **decrease** coverage by `0.12%`.
> The diff coverage is `87.5%`.
[](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #125 +/- ##
==========================================
- Coverage 100% 99.87% -0.13%
==========================================
Files 20 20
Lines 788 790 +2
Branches 61 61
==========================================
+ Hits 788 789 +1
- Misses 0 1 +1
```
| [Impacted Files](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [postmarker/models/messages.py](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=tree#diff-cG9zdG1hcmtlci9tb2RlbHMvbWVzc2FnZXMucHk=) | `98.93% <87.5%> (-1.07%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=footer). Last update [604005f...8c2bf75](https://codecov.io/gh/Stranger6667/postmarker/pull/125?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). | diff --git a/docs/changelog.rst b/docs/changelog.rst
index 2ab7edc..208173f 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -11,6 +11,12 @@ Added
- Short-circuit send of empty batches in `Django` backend. `#123`_
+Changed
+~~~~~~~
+
+- ``OutboundMessageManager.get_details`` and ``InboundMessageManager.get_details`` were methods were renamed to ``get``.
+ Now they returns ``OutboundMessage`` and ``InboundMessage`` instances respectively.
+
`0.9.2`_ - 2017-03-29
---------------------
diff --git a/postmarker/models/messages.py b/postmarker/models/messages.py
index 6e946a0..361515f 100644
--- a/postmarker/models/messages.py
+++ b/postmarker/models/messages.py
@@ -35,8 +35,8 @@ class OpensManager(ModelManager):
class BaseMessage(Model):
- def get_details(self):
- return self._manager.get_details(self.MessageID)
+ def get(self):
+ return self._manager.get(self.MessageID)
class OutboundMessage(BaseMessage):
@@ -82,8 +82,9 @@ class OutboundMessageManager(SubModelManager):
)
return self.expand_responses(responses, 'Messages')
- def get_details(self, id):
- return self.call('GET', '/messages/outbound/%s/details' % id)
+ def get(self, id):
+ response = self.call('GET', '/messages/outbound/%s/details' % id)
+ return self._init_instance(response)
def get_dump(self, id):
return self.call('GET', '/messages/outbound/%s/dump' % id).get('Body')
@@ -169,8 +170,9 @@ class InboundMessageManager(ModelManager):
)
return self.expand_responses(responses, 'InboundMessages')
- def get_details(self, id):
- return self.call('GET', '/messages/inbound/%s/details' % id)
+ def get(self, id):
+ response = self.call('GET', '/messages/inbound/%s/details' % id)
+ return self._init_instance(response)
def bypass(self, id):
return self.call('PUT', '/messages/inbound/%s/bypass' % id)
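With this rename the manager method returns a model instance rather than a raw dict. A minimal sketch (how the manager is obtained from a configured postmarker client is omitted here):

```
def fetch_body(manager, message_id):
    # manager: an OutboundMessageManager from a configured postmarker client
    message = manager.get(message_id)   # now returns an OutboundMessage, not a dict
    return message.Body                 # attribute access instead of message['Body']
```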
| Initialize inbound/outbound message in `get_details`
And probably rename the method to ``get``. | Stranger6667/postmarker | diff --git a/tests/models/test_messages.py b/tests/models/test_messages.py
index 412296f..7910fb7 100644
--- a/tests/models/test_messages.py
+++ b/tests/models/test_messages.py
@@ -8,6 +8,8 @@ from postmarker.exceptions import ClientError
from postmarker.models.base import ModelManager
from postmarker.models.messages import Attachment, InboundMessage, Open, OutboundMessage
+from .._compat import patch
+
CASSETTE_NAME = 'messages'
@@ -39,8 +41,8 @@ class TestOutboundMessages:
assert isinstance(outbound_message, OutboundMessage)
assert str(outbound_message) == 'Sent message to [email protected]'
- def test_get_details(self, outbound_message):
- assert outbound_message.get_details()['Body'] == 'Body example'
+ def test_get(self, outbound_message):
+ assert outbound_message.get().Body == 'Body example'
def test_get_dump(self, outbound_message):
assert outbound_message.get_dump() == 'Body example'
@@ -60,9 +62,10 @@ class TestInboundMessages:
assert isinstance(inbound_message, InboundMessage)
assert str(inbound_message) == 'Blocked message from [email protected]'
- def test_get_details(self, inbound_message):
- with not_found():
- inbound_message.get_details()
+ def test_get(self, inbound_message):
+ with patch.object(inbound_message._manager, 'call', return_value=inbound_message.as_dict()):
+ instance = inbound_message.get()
+ assert isinstance(instance, InboundMessage)
def test_bypass(self, inbound_message):
with not_found('[701] This message was not found or cannot be bypassed.'):
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.9 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-django",
"betamax",
"betamax_serializers",
"mock"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
attrs==22.2.0
Babel==2.11.0
betamax==0.8.1
betamax-serializers==0.2.1
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
docutils==0.18.1
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mock==5.2.0
packaging==21.3
pluggy==1.0.0
-e git+https://github.com/Stranger6667/postmarker.git@604005f5a384995207ffb53e6de2b8f3fa2b38db#egg=postmarker
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytest-django==4.5.2
pytz==2025.2
requests==2.27.1
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-rtd-theme==0.1.9
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: postmarker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- attrs==22.2.0
- babel==2.11.0
- betamax==0.8.1
- betamax-serializers==0.2.1
- charset-normalizer==2.0.12
- coverage==6.2
- docutils==0.18.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-django==4.5.2
- pytz==2025.2
- requests==2.27.1
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-rtd-theme==0.1.9
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/postmarker
| [
"tests/models/test_messages.py::TestOutboundMessages::test_get",
"tests/models/test_messages.py::TestInboundMessages::test_get"
]
| []
| [
"tests/models/test_messages.py::test_sub_managers",
"tests/models/test_messages.py::TestOutboundMessages::test_message",
"tests/models/test_messages.py::TestOutboundMessages::test_get_dump",
"tests/models/test_messages.py::TestOutboundMessages::test_opens",
"tests/models/test_messages.py::TestInboundMessages::test_message",
"tests/models/test_messages.py::TestInboundMessages::test_bypass",
"tests/models/test_messages.py::TestInboundMessages::test_retry",
"tests/models/test_messages.py::TestInboundMessages::test_all",
"tests/models/test_messages.py::TestInboundMessages::test_headers",
"tests/models/test_messages.py::TestInboundMessages::test_not_existing_header",
"tests/models/test_messages.py::TestAttachment::test_instance",
"tests/models/test_messages.py::TestAttachment::test_repr",
"tests/models/test_messages.py::TestAttachment::test_len",
"tests/models/test_messages.py::TestAttachment::test_attributes[ContentType]",
"tests/models/test_messages.py::TestAttachment::test_attributes[Name]",
"tests/models/test_messages.py::TestAttachment::test_attributes[ContentLength]",
"tests/models/test_messages.py::TestAttachment::test_attributes[Content]",
"tests/models/test_messages.py::TestAttachment::test_save",
"tests/models/test_messages.py::TestAttachment::test_as_mime",
"tests/models/test_messages.py::TestOpens::test_model",
"tests/models/test_messages.py::TestOpens::test_webhook"
]
| []
| MIT License | 1,128 | [
"docs/changelog.rst",
"postmarker/models/messages.py"
]
| [
"docs/changelog.rst",
"postmarker/models/messages.py"
]
|
dssg__triage-79 | 9a81fbeb8affcbf9b3bcad6bfe2321e9b2def963 | 2017-03-30 14:58:54 | 478ac2e52e0b074c262eb9fadf25c3ff598cb911 | diff --git a/example_experiment_config.yaml b/example_experiment_config.yaml
index 19a25844..2b5cd470 100644
--- a/example_experiment_config.yaml
+++ b/example_experiment_config.yaml
@@ -87,6 +87,21 @@ feature_aggregations:
# A list of different columns to separately group by
groups:
- 'entity_id'
+
+# FEATURE GROUPING
+# define how to group features and generate combinations
+# feature_group_definition allows you to create groups/subset of your features
+# by different criteria.
+# for instance, 'tables' allows you to send a list of collate feature tables
+# 'prefix' allows you to specify a list of feature name prefixes
+feature_group_definition:
+ tables: ['prefix_entity_id']
+
+# strategies for generating combinations of groups
+# available: all, leave-one-out
+feature_group_strategies: ['all']
+
+
# GRID CONFIGURATION
# The classifier/hyperparameter combinations that should be trained
#
diff --git a/triage/feature_group_creator.py b/triage/feature_group_creator.py
new file mode 100644
index 00000000..95c82033
--- /dev/null
+++ b/triage/feature_group_creator.py
@@ -0,0 +1,62 @@
+def table_subsetter(config_item, table, features):
+ "Return features matching a given table"
+ if table == config_item:
+ return features
+ else:
+ return []
+
+
+def prefix_subsetter(config_item, table, features):
+ "Return features matching a given prefix"
+ return [
+ feature
+ for feature in features
+ if feature.split('_')[0] == config_item
+ ]
+
+
+def all_subsetter(config_item, table, features):
+ return features
+
+
+class FeatureGroupCreator(object):
+ subsetters = {
+ 'tables': table_subsetter,
+ 'prefix': prefix_subsetter,
+ 'all': all_subsetter,
+ }
+
+ def __init__(self, definition):
+ """
+ Args:
+ definition (dict) rules for generating feature groups
+ Each key must correspond to a key in self.subsetters
+ Each value (a list) must be understood by the subsetter
+ """
+ self.definition = definition
+
+ for subsetter in self.definition.keys():
+ if subsetter not in self.subsetters:
+ raise KeyError('Unknown subsetter %s received', subsetter)
+
+ def subsets(self, feature_dictionary):
+ """Generate subsets of a feature dict
+
+ Args:
+ feature_dictionary (dict) tables and the features contained in each
+
+ Returns: (list) subsets of the feature dictionary
+ """
+ subsets = []
+ for name, config in sorted(self.definition.items()):
+ for config_item in config:
+ subset = {}
+ for table, features in feature_dictionary.items():
+ matching_features =\
+ self.subsetters[name](config_item, table, features)
+ if len(matching_features) > 0:
+ subset[table] = matching_features
+ subsets.append(subset)
+ if not any(subset for subset in subsets if any(subset)):
+ raise Exception('No matching feature subsets found!')
+ return subsets
diff --git a/triage/feature_group_mixer.py b/triage/feature_group_mixer.py
new file mode 100644
index 00000000..a04fa900
--- /dev/null
+++ b/triage/feature_group_mixer.py
@@ -0,0 +1,46 @@
+def leave_one_out(feature_groups):
+ """For each group, return a copy of all groups excluding that group
+
+ Args:
+ feature_groups (list) The feature groups to apply the strategy to
+
+ Returns: A list of feature dicts
+ """
+ results = []
+ for index_to_exclude in range(0, len(feature_groups)):
+ group_copy = feature_groups.copy()
+ del group_copy[index_to_exclude]
+ feature_dict = {}
+ for group in group_copy:
+ feature_dict.update(group)
+ results.append(feature_dict)
+ return results
+
+
+def all_features(feature_groups):
+ return feature_groups
+
+
+class FeatureGroupMixer(object):
+ """Generates different combinations of feature groups
+ based on a list of strategies"""
+ strategy_lookup = {
+ 'leave-one-out': leave_one_out,
+ 'all': all_features,
+ }
+
+ def __init__(self, strategies):
+ self.strategies = strategies
+
+ def generate(self, feature_groups):
+ """Apply all strategies to the list of feature groups
+
+ Args:
+ feature_groups (list) A list of feature dicts,
+ each representing a group
+ Returns: (list) of feature dicts
+ """
+ results = []
+ for strategy in self.strategies:
+ results += self.strategy_lookup[strategy](feature_groups)
+ return results
diff --git a/triage/features.py b/triage/features.py
index 5cd16bde..1b8d7c30 100644
--- a/triage/features.py
+++ b/triage/features.py
@@ -1,2 +1,4 @@
from triage.feature_generators import FeatureGenerator
from triage.feature_dictionary_creator import FeatureDictionaryCreator
+from triage.feature_group_creator import FeatureGroupCreator
+from triage.feature_group_mixer import FeatureGroupMixer
diff --git a/triage/pipelines/base.py b/triage/pipelines/base.py
index a1547c15..7903b352 100644
--- a/triage/pipelines/base.py
+++ b/triage/pipelines/base.py
@@ -1,6 +1,10 @@
from triage.db import ensure_db
from triage.label_generators import BinaryLabelGenerator
-from triage.features import FeatureGenerator, FeatureDictionaryCreator
+from triage.features import \
+ FeatureGenerator,\
+ FeatureDictionaryCreator,\
+ FeatureGroupCreator,\
+ FeatureGroupMixer
from triage.model_trainers import ModelTrainer
from triage.predictors import Predictor
from triage.scoring import ModelScorer
@@ -59,6 +63,14 @@ class PipelineBase(object):
db_engine=self.db_engine
)
+ self.feature_group_creator = FeatureGroupCreator(
+ self.config.get('feature_group_definition', {'all': [True]})
+ )
+
+ self.feature_group_mixer = FeatureGroupMixer(
+ self.config.get('feature_group_strategies', ['all'])
+ )
+
self.architect = Architect(
beginning_of_time=dt_from_str(split_config['beginning_of_time']),
label_names=['outcome'],
diff --git a/triage/pipelines/local_parallel.py b/triage/pipelines/local_parallel.py
index fb3c6efe..fa0564d2 100644
--- a/triage/pipelines/local_parallel.py
+++ b/triage/pipelines/local_parallel.py
@@ -45,9 +45,13 @@ class LocalParallelPipeline(PipelineBase):
feature_dates=all_as_of_times,
)
- feature_dict = self.feature_dictionary_creator\
+ master_feature_dict = self.feature_dictionary_creator\
.feature_dictionary(feature_tables)
+ feature_dicts = self.feature_group_mixer.generate(
+ self.feature_group_creator.subsets(master_feature_dict)
+ )
+
# 4. create training and test sets
logging.info('Creating matrices')
logging.debug('---------------------')
@@ -56,7 +60,7 @@ class LocalParallelPipeline(PipelineBase):
updated_split_definitions = self.architect.chop_data(
split_definitions,
- feature_dict
+ feature_dicts
)
for split in updated_split_definitions:
diff --git a/triage/pipelines/serial.py b/triage/pipelines/serial.py
index febb5fa6..79455689 100644
--- a/triage/pipelines/serial.py
+++ b/triage/pipelines/serial.py
@@ -39,9 +39,13 @@ class SerialPipeline(PipelineBase):
feature_dates=all_as_of_times,
)
- feature_dict = self.feature_dictionary_creator\
+ master_feature_dict = self.feature_dictionary_creator\
.feature_dictionary(feature_tables)
+ feature_dicts = self.feature_group_mixer.generate(
+ self.feature_group_creator.subsets(master_feature_dict)
+ )
+
# 4. create training and test sets
logging.info('Creating matrices')
logging.debug('---------------------')
@@ -50,7 +54,7 @@ class SerialPipeline(PipelineBase):
updated_split_definitions = self.architect.chop_data(
split_definitions,
- feature_dict
+ feature_dicts
)
for split in updated_split_definitions:
| Add in feature subsetting
We need two classes that can be configured via the experiment config (names subject to change):
- FeatureGroup
- FeatureSubsettingStrategy
These are configurable per experiment, and thus their definition must be YAML-serializable. The former defines a way to create feature groups, whether by table name, time, aggregation, etc. To start we are just implementing table names. The class will take in its configuration in the constructor, and have a method that takes in a feature dictionary and outputs the subset of that feature dictionary.
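A minimal usage sketch of this interface, based on the `FeatureGroupCreator` and `FeatureGroupMixer` classes added in the patch above; the table and feature names are invented for illustration:

```python
from triage.features import FeatureGroupCreator, FeatureGroupMixer

# Made-up feature dictionary: {collate feature table -> feature columns}
feature_dictionary = {
    'inspections_aggregation': ['inspections_1y', 'inspections_5y'],
    'complaints_aggregation': ['complaints_1y'],
}

# Group features by the table they come from; this definition is
# YAML-serializable, so it can live in the experiment config.
creator = FeatureGroupCreator(definition={
    'tables': ['inspections_aggregation', 'complaints_aggregation'],
})
groups = creator.subsets(feature_dictionary)

# Pair the groups with a combination strategy, e.g. leave-one-out.
feature_dicts = FeatureGroupMixer(['leave-one-out']).generate(groups)
for feature_dict in feature_dicts:
    print(feature_dict)
```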
Lists of groups are paired with a strategy (leave one out/leave one in/use all combinations). The strategy, given a list of groups, can output an iterator/list of feature dictionaries. | dssg/triage | diff --git a/tests/test_feature_group_creator.py b/tests/test_feature_group_creator.py
new file mode 100644
index 00000000..1a326829
--- /dev/null
+++ b/tests/test_feature_group_creator.py
@@ -0,0 +1,53 @@
+from triage.features import FeatureGroupCreator
+
+
+def test_table_group():
+ group = FeatureGroupCreator(
+ definition={'tables': ['one', 'three']}
+ )
+
+ assert group.subsets({
+ 'one': ['col_a', 'col_b', 'col_c'],
+ 'two': ['col_a', 'col_b', 'col_c'],
+ 'three': ['col_a', 'col_b', 'col_c'],
+ }) == [
+ {'one': ['col_a', 'col_b', 'col_c']},
+ {'three': ['col_a', 'col_b', 'col_c']},
+ ]
+
+
+def test_prefix_group():
+ group = FeatureGroupCreator(
+ definition={'prefix': ['major', 'severe']}
+ )
+
+ assert group.subsets({
+ 'one': ['minor_a', 'minor_b', 'minor_c'],
+ 'two': ['severe_a', 'severe_b', 'severe_c'],
+ 'three': ['major_a', 'major_b', 'major_c'],
+ 'four': ['minor_a', 'minor_b', 'minor_c'],
+ }) == [
+ {'three': ['major_a', 'major_b', 'major_c']},
+ {'two': ['severe_a', 'severe_b', 'severe_c']},
+ ]
+
+
+def test_multiple_criteria():
+ group = FeatureGroupCreator(
+ definition={
+ 'prefix': ['major', 'severe'],
+ 'tables': ['one', 'two'],
+ }
+ )
+
+ assert group.subsets({
+ 'one': ['minor_a', 'minor_b', 'minor_c'],
+ 'two': ['severe_a', 'severe_b', 'severe_c'],
+ 'three': ['major_a', 'major_b', 'major_c'],
+ 'four': ['minor_a', 'minor_b', 'minor_c'],
+ }) == [
+ {'three': ['major_a', 'major_b', 'major_c']},
+ {'two': ['severe_a', 'severe_b', 'severe_c']},
+ {'one': ['minor_a', 'minor_b', 'minor_c']},
+ {'two': ['severe_a', 'severe_b', 'severe_c']},
+ ]
diff --git a/tests/test_feature_group_mixer.py b/tests/test_feature_group_mixer.py
new file mode 100644
index 00000000..2edeb1bb
--- /dev/null
+++ b/tests/test_feature_group_mixer.py
@@ -0,0 +1,18 @@
+import itertools
+
+from triage.feature_group_mixer import FeatureGroupMixer
+
+
+def test_feature_group_mixer_leave_one_out():
+ english_numbers = {'one': ['two', 'three'], 'four': ['five', 'six']}
+ letters = {'a': ['b', 'c'], 'd': ['e', 'f']}
+ german_numbers = {'eins': ['zwei', 'drei'], 'vier': ['funf', 'sechs']}
+ feature_groups = [english_numbers, letters, german_numbers]
+
+ result = FeatureGroupMixer(['leave-one-out']).generate(feature_groups)
+ expected = [
+ dict(itertools.chain(letters.items(), german_numbers.items())),
+ dict(itertools.chain(english_numbers.items(), german_numbers.items())),
+ dict(itertools.chain(english_numbers.items(), letters.items())),
+ ]
+ assert result == expected
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 5
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libblas-dev liblapack-dev libatlas-base-dev gfortran"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "py.test -vvv -s --cov=triage"
} | attrs==22.2.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
click==8.0.4
collate==0.3.0
coverage==6.2
GeoAlchemy2==0.11.1
greenlet==2.0.2
importlib-metadata==4.8.3
inflection==0.5.1
iniconfig==1.1.1
jmespath==0.10.0
joblib==1.1.1
numexpr==2.8.1
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pluggy==1.0.0
psycopg2==2.7.7
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
s3transfer==0.5.2
scikit-learn==0.24.2
scipy==1.5.4
six==1.17.0
sklearn==0.0
SQLAlchemy==1.4.54
tables==3.7.0
threadpoolctl==3.1.0
tomli==1.2.3
-e git+https://github.com/dssg/triage.git@9a81fbeb8affcbf9b3bcad6bfe2321e9b2def963#egg=triage
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: triage
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- boto3==1.23.10
- botocore==1.26.10
- click==8.0.4
- collate==0.3.0
- coverage==6.2
- geoalchemy2==0.11.1
- greenlet==2.0.2
- importlib-metadata==4.8.3
- inflection==0.5.1
- iniconfig==1.1.1
- jmespath==0.10.0
- joblib==1.1.1
- numexpr==2.8.1
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pluggy==1.0.0
- psycopg2==2.7.7
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- s3transfer==0.5.2
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.17.0
- sklearn==0.0
- sqlalchemy==1.4.54
- tables==3.7.0
- threadpoolctl==3.1.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/triage
| [
"tests/test_feature_group_creator.py::test_table_group",
"tests/test_feature_group_creator.py::test_prefix_group",
"tests/test_feature_group_creator.py::test_multiple_criteria",
"tests/test_feature_group_mixer.py::test_feature_group_mixer_leave_one_out"
]
| []
| []
| []
| MIT License | 1,129 | [
"triage/pipelines/base.py",
"triage/pipelines/serial.py",
"triage/features.py",
"example_experiment_config.yaml",
"triage/feature_group_mixer.py",
"triage/pipelines/local_parallel.py",
"triage/feature_group_creator.py"
]
| [
"triage/pipelines/base.py",
"triage/pipelines/serial.py",
"triage/features.py",
"example_experiment_config.yaml",
"triage/feature_group_mixer.py",
"triage/pipelines/local_parallel.py",
"triage/feature_group_creator.py"
]
|
|
pydap__pydap-81 | 84273bf521ac625c19c2f603529cebd6523e9d61 | 2017-03-30 15:08:45 | eb8ee96bdf150642bf2e0603f406d2053af02424 | laliberte: @jameshiebert do you give your thumbs up for this to be merged?
jameshiebert: Yeah, I like the way that you split out the utility functions. Makes the code more clear. In any case, it looks good from my perspective.
laliberte: Ok, I'll go ahead and merge! | diff --git a/setup.py b/setup.py
index 0cfe3cc..d6134fa 100644
--- a/setup.py
+++ b/setup.py
@@ -44,6 +44,7 @@ tests_require = functions_extras + cas_extras + server_extras + [
testing_extras = tests_require + [
'nose',
+ 'mock',
'coverage',
'requests'
]
diff --git a/src/pydap/handlers/dap.py b/src/pydap/handlers/dap.py
index d1b66c7..5cb9b4c 100644
--- a/src/pydap/handlers/dap.py
+++ b/src/pydap/handlers/dap.py
@@ -139,43 +139,9 @@ class BaseProxy(object):
dds, data = r.body.split(b'\nData:\n', 1)
dds = dds.decode(r.content_encoding or 'ascii')
- if self.shape:
- # skip size packing
- if self.dtype.char in 'SU':
- data = data[4:]
- else:
- data = data[8:]
-
- # calculate array size
- shape = tuple(
- int(np.ceil((s.stop-s.start)/float(s.step))) for s in index)
- size = int(np.prod(shape))
-
- if self.dtype == np.byte:
- return np.fromstring(data[:size], 'B').reshape(shape)
- elif self.dtype.char in 'SU':
- out = []
- for word in range(size):
- n = np.asscalar(np.fromstring(data[:4], '>I')) # read length
- data = data[4:]
- out.append(data[:n])
- data = data[n + (-n % 4):]
- return np.array([text_type(x.decode('ascii'))
- for x in out], 'S').reshape(shape)
- else:
- try:
- return np.fromstring(data, self.dtype).reshape(shape)
- except ValueError as e:
- if str(e) == 'total size of new array must be unchanged':
- # server-side failure.
- # it is expected that the user should be mindful of this:
- raise RuntimeError(
- ('variable {0} could not be properly '
- 'retrieved. To avoid this '
- 'error consider using open_url(..., '
- 'output_grid=False).').format(quote(self.id)))
- else:
- raise
+ # Parse received dataset:
+ dataset = build_dataset(dds, data=data)
+ return dataset[self.id].data
def __len__(self):
return self.shape[0]
diff --git a/src/pydap/handlers/lib.py b/src/pydap/handlers/lib.py
index 438ac89..72222dc 100644
--- a/src/pydap/handlers/lib.py
+++ b/src/pydap/handlers/lib.py
@@ -196,12 +196,18 @@ def apply_selection(selection, dataset):
return dataset
+def degenerate_grid_to_structure(candidate):
+ if isinstance(candidate, GridType):
+ candidate = StructureType(
+ candidate.name, candidate.attributes)
+ return candidate
+
+
def apply_projection(projection, dataset):
"""Apply a given projection to a dataset.
This function builds and returns a new dataset by adding those variables
that were requested on the projection.
-
"""
out = DatasetType(name=dataset.name, attributes=dataset.attributes)
@@ -218,9 +224,7 @@ def apply_projection(projection, dataset):
# if there are more children to add we need to clear
# the candidate so it has only explicitly added
# children; also, Grids are degenerated into Structures
- if isinstance(candidate, GridType):
- candidate = StructureType(
- candidate.name, candidate.attributes)
+ candidate = degenerate_grid_to_structure(candidate)
candidate._keys = []
target[name] = candidate
target, template = target[name], template[name]
diff --git a/src/pydap/model.py b/src/pydap/model.py
index 83180d8..9c11b6e 100644
--- a/src/pydap/model.py
+++ b/src/pydap/model.py
@@ -322,8 +322,18 @@ class StructureType(DapType):
item.id = '%s.%s' % (self.id, item.name)
def __getitem__(self, key):
- key = quote(key)
- return self._dict[key]
+ try:
+ return self._dict[quote(key)]
+ except KeyError:
+ splitted = key.split('.')
+ if len(splitted) > 1:
+ try:
+ return (self
+ .__getitem__(splitted[0])['.'.join(splitted[1:])])
+ except KeyError:
+ return self.__getitem__('.'.join(splitted[1:]))
+ else:
+ raise
def __delitem__(self, key):
self._dict.__delitem__(key)
diff --git a/src/pydap/parsers/dds.py b/src/pydap/parsers/dds.py
index 7c85d64..19560b4 100644
--- a/src/pydap/parsers/dds.py
+++ b/src/pydap/parsers/dds.py
@@ -3,6 +3,7 @@
import re
import numpy as np
+from six import text_type
from . import SimpleParser
from ..model import (DatasetType, BaseType,
@@ -10,7 +11,6 @@ from ..model import (DatasetType, BaseType,
GridType)
from ..lib import quote, STRING
-
typemap = {
'byte': np.dtype("B"),
'int': np.dtype(">i4"),
@@ -32,9 +32,10 @@ class DDSParser(SimpleParser):
"""A parser for the DDS."""
- def __init__(self, dds):
+ def __init__(self, dds, data=None):
super(DDSParser, self).__init__(dds, re.IGNORECASE)
self.dds = dds
+ self.data = data
def consume(self, regexp):
"""Consume and return a token."""
@@ -81,6 +82,9 @@ class DDSParser(SimpleParser):
self.consume(';')
data = DummyData(dtype, shape)
+ if self.data is not None:
+ data, self.data = convert_data_to_array(self.data, shape,
+ dtype, name)
var = BaseType(name, data, dimensions=dimensions)
return var
@@ -156,12 +160,54 @@ class DDSParser(SimpleParser):
return grid
-def build_dataset(dds):
+def build_dataset(dds, data=None):
"""Return a dataset object from a DDS representation."""
- return DDSParser(dds).parse()
+ return DDSParser(dds, data=data).parse()
class DummyData(object):
def __init__(self, dtype, shape):
self.dtype = dtype
self.shape = shape
+
+
+def convert_data_to_array(data, shape, dtype, id):
+ if len(shape) > 0:
+ if dtype.char in 'SU':
+ data = data[4:]
+ else:
+ data = data[8:]
+
+ # calculate array size
+ size = 1
+ if len(shape) > 0:
+ size = int(np.prod(shape))
+
+ nitems = dtype.itemsize * size
+
+ if dtype == np.byte:
+ return np.fromstring(data[:nitems], 'B').reshape(shape), data[nitems:]
+ elif dtype.char in 'SU':
+ out = []
+ for word in range(size):
+ n = np.asscalar(np.fromstring(data[:4], '>I')) # read itemsize
+ data = data[4:]
+ out.append(data[:n])
+ data = data[n + (-n % 4):]
+ return np.array([text_type(x.decode('ascii'))
+ for x in out], 'S').reshape(shape), data
+ else:
+ try:
+ return (np.fromstring(data[:nitems], dtype).reshape(shape),
+ data[nitems:])
+ except ValueError as e:
+ if str(e) == 'total size of new array must be unchanged':
+ # server-side failure.
+ # it is expected that the user should be mindful of this:
+ raise RuntimeError(
+ ('variable {0} could not be properly '
+ 'retrieved. To avoid this '
+ 'error consider using open_url(..., '
+ 'output_grid=False).').format(quote(id)))
+ else:
+ raise
| Test Erddap responses
Some Erddap servers return invalid DDS responses when a deep object is returned. In that case, we should parse the DDS from the DODS response and extract the data using it.
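A rough sketch of that approach, mirroring what the patch above does in `BaseProxy.__getitem__`: split the DODS response at the `Data:` marker, rebuild the dataset from the DDS that came back using the new `data=` argument of `build_dataset`, and read the variable's data from the parsed dataset. `data_from_dods`, `body` and `var_id` are placeholder names for illustration, and the `'ascii'` encoding is an assumption:

```python
from pydap.parsers.dds import build_dataset

def data_from_dods(body, var_id):
    # body: raw bytes of a .dods response; var_id: e.g. 'SimpleGrid.SimpleGrid'
    dds_bytes, data = body.split(b'\nData:\n', 1)
    dataset = build_dataset(dds_bytes.decode('ascii'), data=data)
    return dataset[var_id].data
```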
| pydap/pydap | diff --git a/src/pydap/tests/test_handlers_dap.py b/src/pydap/tests/test_handlers_dap.py
index f6cfe31..34abe45 100644
--- a/src/pydap/tests/test_handlers_dap.py
+++ b/src/pydap/tests/test_handlers_dap.py
@@ -8,6 +8,10 @@ from pydap.tests.datasets import (
SimpleSequence, SimpleGrid, SimpleArray, VerySimpleSequence)
import unittest
+try:
+ from unittest.mock import patch
+except ImportError:
+ from mock import patch
class TestDapHandler(unittest.TestCase):
@@ -50,6 +54,67 @@ class TestDapHandler(unittest.TestCase):
self.assertEqual(
dataset.SimpleGrid.SimpleGrid.data.slice,
(slice(None), slice(None)))
+ self.assertEqual(
+ repr(dataset.SimpleGrid[:]),
+ "<GridType with array 'SimpleGrid' and maps 'x', 'y'>")
+
+ def test_grid_erddap(self):
+ """Test that dataset has the correct data proxies for grids
+ with the ERDDAP behavior."""
+ with patch('pydap.handlers.lib.degenerate_grid_to_structure',
+ side_effect=(lambda x: x)) as mock_degenerate:
+ dataset = DAPHandler("http://localhost:8001/", self.app1).dataset
+ self.assertEqual(
+ repr(dataset.SimpleGrid[:]),
+ "<GridType with array 'SimpleGrid' and maps 'x', 'y'>")
+ assert mock_degenerate.called
+
+ def test_grid_output_grid_false(self):
+ """Test that dataset has the correct data proxies for grids with
+ option output_grid set to False."""
+ dataset = DAPHandler("http://localhost:8001/", self.app1,
+ output_grid=False).dataset
+
+ self.assertEqual(dataset.keys(), ["SimpleGrid", "x", "y"])
+ self.assertEqual(
+ dataset.SimpleGrid.keys(), ["SimpleGrid", "x", "y"])
+
+ # test one of the axis
+ self.assertIsInstance(dataset.SimpleGrid.x.data, BaseProxy)
+ self.assertEqual(
+ dataset.SimpleGrid.x.data.baseurl, "http://localhost:8001/")
+ self.assertEqual(dataset.SimpleGrid.x.data.id, "SimpleGrid.x")
+ self.assertEqual(dataset.SimpleGrid.x.data.dtype, np.dtype('>i4'))
+ self.assertEqual(dataset.SimpleGrid.x.data.shape, (3,))
+ self.assertEqual(
+ dataset.SimpleGrid.x.data.slice, (slice(None),))
+
+ # test the grid
+ self.assertIsInstance(dataset.SimpleGrid.SimpleGrid.data, BaseProxy)
+ self.assertEqual(
+ dataset.SimpleGrid.SimpleGrid.data.baseurl,
+ "http://localhost:8001/")
+ self.assertEqual(
+ dataset.SimpleGrid.SimpleGrid.data.id, "SimpleGrid.SimpleGrid")
+ self.assertEqual(
+ dataset.SimpleGrid.SimpleGrid.data.dtype, np.dtype('>i4'))
+ self.assertEqual(dataset.SimpleGrid.SimpleGrid.data.shape, (2, 3))
+ self.assertEqual(
+ dataset.SimpleGrid.SimpleGrid.data.slice,
+ (slice(None), slice(None)))
+ np.testing.assert_array_equal(dataset.SimpleGrid[:],
+ [[0, 1, 2], [3, 4, 5]])
+
+ def test_grid_erddap_output_grid_false(self):
+ """Test that dataset has the correct data proxies for grids with
+ option output_grid set to False and with the ERDDAP behavior."""
+ with patch('pydap.handlers.lib.degenerate_grid_to_structure',
+ side_effect=(lambda x: x)) as mock_degenerate:
+ dataset = DAPHandler("http://localhost:8001/", self.app1,
+ output_grid=False).dataset
+ np.testing.assert_array_equal(dataset.SimpleGrid[:],
+ [[0, 1, 2], [3, 4, 5]])
+ assert mock_degenerate.called
def test_grid_with_projection(self):
"""Test that a sliced proxy can be created for grids."""
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 5
} | 3.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[testing]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gfortran libblas-dev liblapack-dev"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
beautifulsoup4==4.12.3
certifi==2021.5.30
charset-normalizer==2.0.12
coards==1.0.5
coverage==6.2
docopt==0.6.2
flake8==5.0.4
gsw==3.4.0
gunicorn==21.2.0
idna==3.10
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
Jinja2==3.0.3
lxml==5.3.1
MarkupSafe==2.0.1
mccabe==0.7.0
MechanicalSoup==1.3.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
-e git+https://github.com/pydap/pydap.git@84273bf521ac625c19c2f603529cebd6523e9d61#egg=Pydap
pyflakes==2.5.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
requests==2.27.1
scipy==1.5.4
six==1.17.0
soupsieve==2.3.2.post1
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
waitress==2.0.0
WebOb==1.8.9
WebTest==3.0.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: pydap
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- beautifulsoup4==4.12.3
- charset-normalizer==2.0.12
- coards==1.0.5
- coverage==6.2
- docopt==0.6.2
- flake8==5.0.4
- gsw==3.4.0
- gunicorn==21.2.0
- idna==3.10
- importlib-metadata==4.2.0
- jinja2==3.0.3
- lxml==5.3.1
- markupsafe==2.0.1
- mccabe==0.7.0
- mechanicalsoup==1.3.0
- nose==1.3.7
- numpy==1.19.5
- pycodestyle==2.9.1
- pyflakes==2.5.0
- requests==2.27.1
- scipy==1.5.4
- six==1.17.0
- soupsieve==2.3.2.post1
- urllib3==1.26.20
- waitress==2.0.0
- webob==1.8.9
- webtest==3.0.0
prefix: /opt/conda/envs/pydap
| [
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid_erddap",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid_erddap_output_grid_false"
]
| []
| [
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_base_type_with_projection",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid_array_with_projection",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid_map_with_projection",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid_output_grid_false",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_grid_with_projection",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_sequence",
"src/pydap/tests/test_handlers_dap.py::TestDapHandler::test_sequence_with_projection",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_comparisons",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_getitem",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_getitem_out_of_bound",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_inexact_division",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_iteration",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_len",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxy::test_repr",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxyShort::test_getitem",
"src/pydap/tests/test_handlers_dap.py::TestBaseProxyString::test_getitem",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_attributes",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_comparisons",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_getitem",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_iter",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_iter_child",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_repr",
"src/pydap/tests/test_handlers_dap.py::TestSequenceProxy::test_url",
"src/pydap/tests/test_handlers_dap.py::TestSequenceWithString::test_filtering",
"src/pydap/tests/test_handlers_dap.py::TestSequenceWithString::test_iter",
"src/pydap/tests/test_handlers_dap.py::TestSequenceWithString::test_iter_child",
"src/pydap/tests/test_handlers_dap.py::TestSequenceWithString::test_projection",
"src/pydap/tests/test_handlers_dap.py::TestStringBaseType::test_getitem",
"src/pydap/tests/test_handlers_dap.py::TestArrayStringBaseType::test_getitem"
]
| []
| MIT License | 1,130 | [
"src/pydap/handlers/lib.py",
"setup.py",
"src/pydap/parsers/dds.py",
"src/pydap/model.py",
"src/pydap/handlers/dap.py"
]
| [
"src/pydap/handlers/lib.py",
"setup.py",
"src/pydap/parsers/dds.py",
"src/pydap/model.py",
"src/pydap/handlers/dap.py"
]
|
dask__dask-2144 | 47c68462b0b2148fdcaf6d5bcb992e376a0dd56f | 2017-03-30 15:24:26 | bdb021c7dcd94ae1fa51c82fae6cf4cf7319aa14 | jcrist: I couldn't think of a good way to test this, but also don't really think it needs a test. We'd want to raise a `KeyboardInterrupt` in the main thread while the scheduler (also running in the main thread) is blocked by a call to `Queue.get`. Not sure if this is possible. | diff --git a/.travis.yml b/.travis.yml
index 4e2415057..d878274f6 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,9 +5,11 @@ env:
matrix:
- PYTHON=2.7 NUMPY=1.10.4 PANDAS=0.19.0 COVERAGE='true' PARALLEL='false' TEST_IMPORTS='false' XTRATESTARGS=
- PYTHON=2.7 NUMPY=1.11.0 PANDAS=0.19.2 COVERAGE='false' PARALLEL='true' TEST_IMPORTS='false' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
+ - PYTHON=2.7 NUMPY=1.12.1 PANDAS=0.19.2 COVERAGE='false' PARALLEL='true' TEST_IMPORTS='false' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
- PYTHON=3.4 NUMPY=1.10.4 PANDAS=0.19.1 COVERAGE='false' PARALLEL='true' TEST_IMPORTS='false' PYTHONOPTIMIZE=2 XTRATESTARGS=--ignore=dask/diagnostics
- PYTHON=3.5 NUMPY=1.11.0 PANDAS=0.19.2 COVERAGE='false' PARALLEL='true' TEST_IMPORTS='false' XTRATESTARGS=
- PYTHON=3.6 NUMPY=1.11.2 PANDAS=0.19.2 COVERAGE='false' PARALLEL='true' TEST_IMPORTS='true' XTRATESTARGS=
+ - PYTHON=3.6 NUMPY=1.12.1 PANDAS=0.19.2 COVERAGE='false' PARALLEL='true' TEST_IMPORTS='true' XTRATESTARGS=
addons:
apt:
diff --git a/appveyor.yml b/appveyor.yml
index b0837c310..0f54df9e2 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -12,7 +12,7 @@ environment:
# Since appveyor is quite slow, we only use a single configuration
- PYTHON: "3.5"
ARCH: "64"
- NUMPY: "1.11"
+ NUMPY: "1.12"
PANDAS: "0.19.2"
CONDA_ENV: testenv
diff --git a/dask/array/__init__.py b/dask/array/__init__.py
index 23cd3800a..57a6fd8f0 100644
--- a/dask/array/__init__.py
+++ b/dask/array/__init__.py
@@ -3,7 +3,7 @@ from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, take, tensordot, transpose,
from_array, choose, where, coarsen, insert, broadcast_to, ravel,
- roll, fromfunction, unique, store, squeeze, topk, bincount,
+ roll, fromfunction, unique, store, squeeze, topk, bincount, tile,
digitize, histogram, map_blocks, atop, to_hdf5, dot, cov, array,
dstack, vstack, hstack, to_npy_stack, from_npy_stack, compress,
from_delayed, round, swapaxes, repeat, asarray)
diff --git a/dask/array/core.py b/dask/array/core.py
index 365d832a8..b0d9d229f 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -2618,6 +2618,8 @@ def elemwise(op, *args, **kwargs):
msg = "%s does not take the following keyword arguments %s"
raise TypeError(msg % (op.__name__, str(sorted(set(kwargs) - set(['name', 'dtype'])))))
+ args = [np.asarray(a) if isinstance(a, (list, tuple)) else a for a in args]
+
shapes = [getattr(arg, 'shape', ()) for arg in args]
shapes = [s if isinstance(s, Iterable) else () for s in shapes]
out_ndim = len(broadcast_shapes(*shapes)) # Raises ValueError if dimensions mismatch
@@ -2839,7 +2841,7 @@ def roll(array, shift, axis=None):
result = ravel(result)
if not isinstance(shift, Integral):
- TypeError(
+ raise TypeError(
"Expect `shift` to be an instance of Integral"
" when `axis` is None."
)
@@ -2856,6 +2858,9 @@ def roll(array, shift, axis=None):
except TypeError:
axis = (axis,)
+ if len(shift) != len(axis):
+ raise ValueError("Must have the same number of shifts as axes.")
+
for i, s in zip(axis, shift):
s = -s
s %= result.shape[i]
@@ -3794,7 +3799,7 @@ def swapaxes(a, axis1, axis2):
dtype=a.dtype)
-@wraps(np.dot)
+@wraps(np.repeat)
def repeat(a, repeats, axis=None):
if axis is None:
if a.ndim == 1:
@@ -3835,6 +3840,21 @@ def repeat(a, repeats, axis=None):
return concatenate(out, axis=axis)
+@wraps(np.tile)
+def tile(A, reps):
+ if not isinstance(reps, Integral):
+ raise NotImplementedError("Only integer valued `reps` supported.")
+
+ if reps < 0:
+ raise ValueError("Negative `reps` are not allowed.")
+ elif reps == 0:
+ return A[..., :0]
+ elif reps == 1:
+ return A
+
+ return concatenate(reps * [A], axis=-1)
+
+
def slice_with_dask_array(x, index):
y = elemwise(getitem, x, index, dtype=x.dtype)
diff --git a/dask/async.py b/dask/async.py
index e933b889f..058f2e73f 100644
--- a/dask/async.py
+++ b/dask/async.py
@@ -114,10 +114,11 @@ See the function ``inline_functions`` for more information.
"""
from __future__ import absolute_import, division, print_function
+import os
import sys
import traceback
-from .compatibility import Queue
+from .compatibility import Queue, Empty
from .core import (istask, flatten, reverse_dict, get_dependencies, ishashable,
has_tasks)
from .context import _globals
@@ -127,6 +128,30 @@ from .optimize import cull
from .utils_test import add, inc # noqa: F401
+if sys.version_info.major < 3:
+ # Due to a bug in python 2.7 Queue.get, if a timeout isn't specified then
+ # `Queue.get` can't be interrupted. A workaround is to specify an extremely
+ # long timeout, which then allows it to be interrupted.
+ # For more information see: https://bugs.python.org/issue1360
+ def queue_get(q):
+ return q.get(block=True, timeout=(365 * 24 * 60 * 60))
+
+elif os.name == 'nt':
+ # Python 3 windows Queue.get also doesn't handle interrupts properly. To
+ # workaround this we poll at a sufficiently large interval that it
+ # shouldn't affect performance, but small enough that users trying to kill
+ # an application shouldn't care.
+ def queue_get(q):
+ while True:
+ try:
+ return q.get(block=True, timeout=0.1)
+ except Empty:
+ pass
+else:
+ def queue_get(q):
+ return q.get()
+
+
DEBUG = False
@@ -488,7 +513,7 @@ def get_async(apply_async, num_workers, dsk, result, cache=None,
# Main loop, wait on tasks to finish, insert new ones
while state['waiting'] or state['ready'] or state['running']:
- key, res_info = queue.get()
+ key, res_info = queue_get(queue)
res, tb, worker_id = loads(res_info)
if isinstance(res, BaseException):
if rerun_exceptions_locally:
diff --git a/docs/source/array-api.rst b/docs/source/array-api.rst
index 4750f8b0d..32503530b 100644
--- a/docs/source/array-api.rst
+++ b/docs/source/array-api.rst
@@ -127,6 +127,7 @@ Top level user functions:
tan
tanh
tensordot
+ tile
topk
transpose
tril
diff --git a/docs/source/debugging.rst b/docs/source/debugging.rst
new file mode 100644
index 000000000..105a7857e
--- /dev/null
+++ b/docs/source/debugging.rst
@@ -0,0 +1,238 @@
+Debugging
+=========
+
+Debugging parallel programs is hard. Normal debugging tools like logging and
+using ``pdb`` to interact with tracebacks stop working normally when exceptions
+occur in far-away machines or different processes or threads.
+
+Dask has a variety of mechanisms to make this process easier. Depending on
+your situation some of these approaches may be more appropriate than others.
+
+These approaches are ordered from lightweight or easy solutions to more
+involved solutions.
+
+Exceptions
+----------
+
+When a task in your computation fails the standard way of understanding what
+went wrong is to look at the exception and traceback. Often people do this
+with the ``pdb`` module, IPython ``%debug`` or ``%pdb`` magics, or by just
+looking at the traceback and investigating where in their code the exception
+occurred.
+
+Normally when a computation computes in a separate thread or a different
+machine these approaches break down. Dask provides a few mechanisms to
+recreate the normal Python debugging experience.
+
+Inspect Exceptions and Tracebacks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default, Dask already copies the exception and traceback wherever they
+occur and reraises that exception locally. If your task failed with a
+``ZeroDivisionError`` remotely then you'll get a ``ZeroDivisionError`` in your
+interactive session. Similarly you'll see a full traceback of where this error
+occurred, which, just like in normal Python, can help you to identify the
+troublsome spot in your code.
+
+However, you cannot use the ``pdb`` module or ``%debug`` IPython magics with
+these tracebacks to look at the value of variables during failure. You can
+only inspect things visually. Additionally, the top of the traceback may be
+filled with functions that are dask-specific and not relevant to your
+problem, you can safely ignore these.
+
+Both the single-machine and distributed schedulers do this.
+
+
+Use the Single-Threaded Scheduler
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Dask ships with a simple single-threaded scheduler. This doesn't offer any
+parallel performance improvements, but does run your Dask computation
+faithfully in your local thread, allowing you to use normal tools like ``pdb``,
+``%debug`` IPython magics, the profiling tools like the ``cProfile`` module and
+`snakeviz <https://jiffyclub.github.io/snakeviz/>`_. This allows you to use
+all of your normal Python debugging tricks in Dask computations, as long as you
+don't need parallelism.
+
+This only works for single-machine schedulers. It does not work with
+dask.distributed unless you are comfortable using the Tornado API (look at the
+`testing infrastructure
+<http://distributed.readthedocs.io/en/latest/develop.html#writing-tests>`_
+docs, which accomplish this). Also, because this operates on a single machine
+it assumes that your computation can run on a single machine without exceeding
+memory limits. It may be wise to use this approach on smaller versions of your
+problem if possible.
+
+
+Rerun Failed Task Locally
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If a remote task fails, we can collect the function and all inputs, bring them
+to the local thread, and then rerun the function in hopes of triggering the
+same exception locally, where normal debugging tools can be used.
+
+With the single machine schedulers, use the ``rerun_exceptions_locally=True``
+keyword.
+
+.. code-block:: python
+
+ x.compute(rerun_exceptions_locally=True)
+
+On the distributed scheduler use the ``recreate_error_locally`` method on
+anything that contains ``Futures`` :
+
+.. code-block:: python
+
+ >>> x.compute()
+ ZeroDivisionError(...)
+
+ >>> %pdb
+ >>> future = client.compute(x)
+ >>> client.recreate_error_locally(future)
+
+
+Remove Failed Futures Manually
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Sometimes only parts of your computations fail, for example if some rows of a
+CSV dataset are faulty in some way. When running with the distributed
+scheduler you can remove chunks of your data that have produced bad results if
+you switch to dealing with Futures.
+
+.. code-block:: python
+
+ >>> import dask.dataframe as dd
+ >>> df = ... # create dataframe
+ >>> df = df.persist() # start computing on the cluster
+
+ >>> from distributed.client import futures_of
+ >>> futures = futures_of(df) # get futures behind dataframe
+ >>> futures
+ [<Future: status: finished, type: pd.DataFrame, key: load-1>
+ <Future: status: finished, type: pd.DataFrame, key: load-2>
+ <Future: status: error, key: load-3>
+ <Future: status: pending, key: load-4>
+ <Future: status: error, key: load-5>]
+
+ >>> # wait until computation is done
+ >>> while any(f.status == 'pending' for f in futures):
+ ... sleep(0.1)
+
+ >>> # pick out only the successful futures and reconstruct the dataframe
+ >>> good_futures = [f for f in futures if f.status == 'finished']
+ >>> df = dd.from_delayed(good_futures, meta=df._meta)
+
+This is a bit of a hack, but often practical when first exploring messy data.
+If you are using the concurrent.futures API (map, submit, gather) then this
+approach is more natural.
+
+
+Inspect Scheduling State
+------------------------
+
+Not all errors present themselves as Exceptions. For example in a distributed
+system workers may die unexpectedly or your computation may be unreasonably
+slow due to inter-worker communication or scheduler overhead or one of several
+other issues. Getting feedback about what's going on can help to identify
+both failures and general performance bottlenecks.
+
+For the single-machine scheduler see :doc:`diagnostics <diagnostics`
+documentation. The rest of the section will assume that you are using the
+`distributed scheduler <http://distributed.readthedocs.io/en/latest/>`_ where
+these issues arise more commonly.
+
+Web Diagnostics
+~~~~~~~~~~~~~~~
+
+First, the distributed scheduler has a number of `diagnostic web pages
+<http://distributed.readthedocs.io/en/latest/web.html>`_ showing dozens of
+recorded metrics like CPU, memory, network, and disk use, a history of previous
+tasks, allocation of tasks to workers, worker memory pressure, work stealing,
+open file handle limits, etc.. *Many* problems can be correctly diagnosed by
+inspecting these pages. By default these are available at
+``http://scheduler:8787/`` ``http://scheduler:8788/`` and ``http://worker:8789/``
+where ``scheduler`` and ``worker`` should be replaced by the addresses of the
+scheduler and each of the workers. See `web diagnostic docs
+<http://distributed.readthedocs.io/en/latest/web.html>`_ for more information.
+
+Logs
+~~~~
+
+The scheduler and workers and client all emits logs using `Python's standard
+logging module <https://docs.python.org/3/library/logging.html>`_. By default
+these emit to standard error. When Dask is launched by a cluster job scheduler
+(SGE/SLURM/YARN/Mesos/Marathon/Kubernetes/whatever) that system will track
+these logs and will have an interface to help you access them. If you are
+launching Dask on your own they will probably dump to the screen unless you
+`redirect stderr to a file
+<https://en.wikipedia.org/wiki/Redirection_(computing)#Redirecting_to_and_from_the_standard_file_handles>`_
+.
+
+You can control the logging verbosity in the ``~/.dask/config.yaml`` file.
+Defaults currently look like the following:
+
+.. code-block:: yaml
+
+ logging:
+ distributed: info
+ distributed.client: warning
+ bokeh: error
+
+So for example you could add a line like ``distributed.worker: debug`` to get
+*very* verbose output from the workers.
+
+
+LocalCluster
+------------
+
+If you are using the distributed scheduler from a single machine you may be
+setting up workers manually using the command line interface or you may be
+using `LocalCluster <http://distributed.readthedocs.io/en/latest/local-cluster.html>`_
+which is what runs when you just call ``Client()``
+
+.. code-block:: python
+
+ >>> from dask.distributed import Client, LocalCluster
+ >>> client = Client() # This is actually the following two commands
+
+ >>> cluster = LocalCluster()
+ >>> client = Client(cluster.scheduler.address)
+
+LocalCluster is useful because the scheduler and workers are in the same
+process with you, so you can easily inspect their `state
+<http://distributed.readthedocs.io/en/latest/scheduling-state.html>`_ while
+they run (they are running in a separate thread).
+
+.. code-block:: python
+
+ >>> cluster.scheduler.processing
+ {'worker-one:59858': {'inc-123', 'add-443'},
+ 'worker-two:48248': {'inc-456'}}
+
+You can also do this for the workers *if* you run them without nanny processes.
+
+.. code-block:: python
+
+ >>> cluster = LocalCluster(nanny=False)
+ >>> client = Client(cluster)
+
+This can be very helpful if you want to use the dask.distributed API and still
+want to investigate what is going on directly within the workers. Information
+is not distilled for you like it is in the web diagnostics, but you have full
+low-level access.
+
+
+Inspect state with IPython
+--------------------------
+
+Sometimes you want to inspect the state of your cluster, but you don't have the
+luxury of operating on a single machine. In these cases you can launch an
+IPython kernel on the scheduler and on every worker, which lets you inspect
+state on the scheduler and workers as computations are completing.
+
+This does not give you the ability to run ``%pdb`` or ``%debug`` on remote
+machines, the tasks are still running in separate threads, and so are not
+easily accessible from an interactive IPython session.
+
+For more details, see the `Dask.distributed IPython docs
+<http://distributed.readthedocs.io/en/latest/ipython.html>`_.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index b6bcb1770..5cc89e524 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -231,6 +231,7 @@ often a better choice. If you are a *core developer*, then you should start her
**Help & reference**
+* :doc:`debugging`
* :doc:`support`
* :doc:`changelog`
* :doc:`presentations`
@@ -247,6 +248,7 @@ often a better choice. If you are a *core developer*, then you should start her
:hidden:
:caption: Help & reference
+ debugging.rst
support.rst
changelog.rst
presentations.rst
| Bug: threaded scheduler is not interruptible
```
import dask, time
dask.__version__
> u'0.14.1'
# can be interrupted
fun = dask.delayed(lambda x: time.sleep(5))
dask.compute([fun(x) for x in range(100)], get = dask.threaded.get)
# can't be interrupted
fun2 = dask.delayed(lambda x: sum([1 for x in range(10000000)]))
dask.compute([fun2(x) for x in range(100)], get = dask.threaded.get)
```
Using the threaded scheduler, I noticed that some tasks cannot be interrupted by pressing Ctrl+C in a Python console. Directly after pressing Ctrl+C, the CPU usage of that process jumps up: it uses all but one of the available cores. Judging by the output of top, the process seems to spend the computing time in kernel space.
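The underlying problem (per the patch above) is that the scheduler's main thread sits blocked in `Queue.get`, which on Python 2 cannot be interrupted unless a timeout is given (https://bugs.python.org/issue1360), and which also mishandles interrupts on Windows under Python 3. A condensed sketch of the `queue_get` helper the patch adds to `dask/async.py`, with stdlib imports substituted for `dask.compatibility`:

```python
import os
import sys

try:                      # Python 3
    from queue import Empty
except ImportError:       # Python 2
    from Queue import Empty

if sys.version_info.major < 3:
    def queue_get(q):
        # An absurdly long timeout keeps the call interruptible on Python 2.
        return q.get(block=True, timeout=365 * 24 * 60 * 60)
elif os.name == 'nt':
    def queue_get(q):
        # Poll on Windows so Ctrl+C is noticed between attempts.
        while True:
            try:
                return q.get(block=True, timeout=0.1)
            except Empty:
                pass
else:
    def queue_get(q):
        return q.get()
```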
| dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index cb35eb3ac..b9b185b49 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -712,14 +712,31 @@ def test_ravel():
assert_eq(np.ravel(x), da.ravel(a))
+def _maybe_len(l):
+ try:
+ return len(l)
+ except TypeError:
+ return 0
+
+
@pytest.mark.parametrize('chunks', [(4, 6), (2, 6)])
[email protected]('shift', [3, 7, 9])
[email protected]('axis', [None, 0, 1, -1])
[email protected]('shift', [3, 7, 9, (3, 9), (7, 2)])
[email protected]('axis', [None, 0, 1, -1, (0, 1), (1, 0)])
def test_roll(chunks, shift, axis):
x = np.random.randint(10, size=(4, 6))
a = from_array(x, chunks=chunks)
- assert_eq(np.roll(x, shift, axis), da.roll(a, shift, axis))
+ if _maybe_len(shift) != _maybe_len(axis):
+ with pytest.raises(TypeError if axis is None else ValueError):
+ da.roll(a, shift, axis)
+ else:
+ if (_maybe_len(shift) > 1 and
+ LooseVersion(np.__version__) < LooseVersion("1.12.0")):
+ pytest.skip(
+ "NumPy %s doesn't support multiple axes with `roll`."
+ " Need NumPy 1.12.0 or greater." % np.__version__
+ )
+ assert_eq(np.roll(x, shift, axis), da.roll(a, shift, axis))
@pytest.mark.parametrize('original_shape,new_shape,chunks', [
@@ -2572,6 +2589,44 @@ def test_repeat():
assert all(concat(d.repeat(r).chunks))
[email protected]('shape, chunks', [
+ ((10,), (1,)),
+ ((10, 11, 13), (4, 5, 3)),
+])
[email protected]('reps', [0, 1, 2, 3, 5])
+def test_tile(shape, chunks, reps):
+ x = np.random.random(shape)
+ d = da.from_array(x, chunks=chunks)
+
+ assert_eq(np.tile(x, reps), da.tile(d, reps))
+
+
[email protected]('shape, chunks', [
+ ((10,), (1,)),
+ ((10, 11, 13), (4, 5, 3)),
+])
[email protected]('reps', [-1, -5])
+def test_tile_neg_reps(shape, chunks, reps):
+ x = np.random.random(shape)
+ d = da.from_array(x, chunks=chunks)
+
+ with pytest.raises(ValueError):
+ da.tile(d, reps)
+
+
[email protected]('shape, chunks', [
+ ((10,), (1,)),
+ ((10, 11, 13), (4, 5, 3)),
+])
[email protected]('reps', [[1], [1, 2]])
+def test_tile_array_reps(shape, chunks, reps):
+ x = np.random.random(shape)
+ d = da.from_array(x, chunks=chunks)
+
+ with pytest.raises(NotImplementedError):
+ da.tile(d, reps)
+
+
def test_concatenate_stack_dont_warn():
with warnings.catch_warnings(record=True) as record:
da.concatenate([da.ones(2, chunks=1)] * 62)
@@ -2835,3 +2890,20 @@ def test_atop_with_numpy_arrays():
s = da.sum(x)
assert any(x is v for v in s.dask.values())
+
+
[email protected]('chunks', (100, 6))
[email protected]('other', [[0, 0, 1], [2, 1, 3], (0, 0, 1)])
+def test_elemwise_with_lists(chunks, other):
+ x = np.arange(12).reshape((4, 3))
+ d = da.arange(12, chunks=chunks).reshape((4, 3))
+
+ x2 = np.vstack([x[:, 0], x[:, 1], x[:, 2]]).T
+ d2 = da.vstack([d[:, 0], d[:, 1], d[:, 2]]).T
+
+ assert_eq(x2, d2)
+
+ x3 = x2 * other
+ d3 = d2 * other
+
+ assert_eq(x3, d3)
diff --git a/dask/tests/test_threaded.py b/dask/tests/test_threaded.py
index a377e9449..7b27a5963 100644
--- a/dask/tests/test_threaded.py
+++ b/dask/tests/test_threaded.py
@@ -1,6 +1,9 @@
-from multiprocessing.pool import ThreadPool
+import os
import threading
-from threading import Thread
+import signal
+import subprocess
+import sys
+from multiprocessing.pool import ThreadPool
from time import time, sleep
import pytest
@@ -52,7 +55,7 @@ def test_threaded_within_thread():
before = threading.active_count()
for i in range(20):
- t = Thread(target=f, args=(1,))
+ t = threading.Thread(target=f, args=(1,))
t.daemon = True
t.start()
t.join()
@@ -91,7 +94,7 @@ def test_thread_safety():
threads = []
for i in range(20):
- t = Thread(target=test_f)
+ t = threading.Thread(target=test_f)
t.daemon = True
t.start()
threads.append(t)
@@ -100,3 +103,63 @@ def test_thread_safety():
thread.join()
assert L == [1] * 20
+
+
+code = """
+import sys
+from dask.threaded import get
+
+def signal_started():
+ sys.stdout.write('started\\n')
+ sys.stdout.flush()
+
+def long_task(x):
+ out = 0
+ N = 100000
+ for i in range(N):
+ for j in range(N):
+ out += 1
+
+dsk = {('x', i): (long_task, 'started') for i in range(100)}
+dsk['started'] = (signal_started,)
+dsk['x'] = (sum, list(dsk.keys()))
+get(dsk, 'x')
+"""
+
+
+# TODO: this test passes locally on windows, but fails on appveyor
+# because the ctrl-c event also tears down their infrastructure.
+# There's probably a better way to test this, but for now we'll mark
+# it slow (slow tests are skipped on appveyor).
[email protected]
+def test_interrupt():
+ try:
+ proc = subprocess.Popen([sys.executable, '-c', code],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ # Wait for scheduler to start
+ msg = proc.stdout.readline()
+ if msg != 'started\n' and proc.poll() is not None:
+ assert False, "subprocess failed"
+ # Scheduler has started, send an interrupt
+ sigint = signal.CTRL_C_EVENT if os.name == 'nt' else signal.SIGINT
+ try:
+ proc.send_signal(sigint)
+ # Wait a bit for it to die
+ start = time()
+ while time() - start < 0.5:
+ if proc.poll() is not None:
+ break
+ sleep(0.05)
+ else:
+ assert False, "KeyboardInterrupt Failed"
+ except KeyboardInterrupt:
+ # On windows the interrupt is also raised in this process.
+ # That's silly, ignore it.
+ pass
+ # Ensure KeyboardInterrupt in traceback
+ stderr = proc.stderr.read()
+ assert "KeyboardInterrupt" in stderr.decode()
+ except:
+ proc.terminate()
+ raise
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 7
} | 1.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"moto",
"mock"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"docs/requirements-docs.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.1.2
aiohttp==3.8.6
aioitertools==0.11.0
aiosignal==1.2.0
alabaster==0.7.13
async-timeout==4.0.2
asynctest==0.13.0
attrs==22.2.0
Babel==2.11.0
boto3==1.23.10
botocore==1.23.24
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
click==8.0.4
cloudpickle==2.2.1
cryptography==40.0.2
-e git+https://github.com/dask/dask.git@47c68462b0b2148fdcaf6d5bcb992e376a0dd56f#egg=dask
dataclasses==0.8
distributed==1.19.3
docutils==0.18.1
frozenlist==1.2.0
fsspec==2022.1.0
HeapDict==1.0.1
idna==3.10
idna-ssl==1.1.0
imagesize==1.4.1
importlib-metadata==4.8.3
iniconfig==1.1.1
Jinja2==3.0.3
jmespath==0.10.0
locket==1.0.0
MarkupSafe==2.0.1
mock==5.2.0
moto==4.0.13
msgpack-python==0.5.6
multidict==5.2.0
numpy==1.19.5
numpydoc==1.1.0
packaging==21.3
pandas==1.1.5
partd==1.2.0
pluggy==1.0.0
psutil==7.0.0
py==1.11.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
requests==2.27.1
responses==0.17.0
s3fs==2022.1.0
s3transfer==0.5.2
six==1.17.0
snowballstemmer==2.2.0
sortedcontainers==2.4.0
Sphinx==5.3.0
sphinx-rtd-theme==2.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tblib==1.7.0
tomli==1.2.3
toolz==0.12.0
tornado==6.1
typing_extensions==4.1.1
urllib3==1.26.20
Werkzeug==2.0.3
wrapt==1.16.0
xmltodict==0.14.2
yarl==1.7.2
zict==2.1.0
zipp==3.6.0
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.1.2
- aiohttp==3.8.6
- aioitertools==0.11.0
- aiosignal==1.2.0
- alabaster==0.7.13
- async-timeout==4.0.2
- asynctest==0.13.0
- attrs==22.2.0
- babel==2.11.0
- boto3==1.23.10
- botocore==1.23.24
- cffi==1.15.1
- charset-normalizer==2.0.12
- click==8.0.4
- cloudpickle==2.2.1
- cryptography==40.0.2
- dataclasses==0.8
- distributed==1.19.3
- docutils==0.18.1
- frozenlist==1.2.0
- fsspec==2022.1.0
- heapdict==1.0.1
- idna==3.10
- idna-ssl==1.1.0
- imagesize==1.4.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jinja2==3.0.3
- jmespath==0.10.0
- locket==1.0.0
- markupsafe==2.0.1
- mock==5.2.0
- moto==4.0.13
- msgpack-python==0.5.6
- multidict==5.2.0
- numpy==1.19.5
- numpydoc==1.1.0
- packaging==21.3
- pandas==1.1.5
- partd==1.2.0
- pluggy==1.0.0
- psutil==7.0.0
- py==1.11.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- requests==2.27.1
- responses==0.17.0
- s3fs==2022.1.0
- s3transfer==0.5.2
- six==1.17.0
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- sphinx==5.3.0
- sphinx-rtd-theme==2.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tblib==1.7.0
- tomli==1.2.3
- toolz==0.12.0
- tornado==6.1
- typing-extensions==4.1.1
- urllib3==1.26.20
- werkzeug==2.0.3
- wrapt==1.16.0
- xmltodict==0.14.2
- yarl==1.7.2
- zict==2.1.0
- zipp==3.6.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_roll[0-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-9-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[0-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[0-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[2-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[2-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[3-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[3-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[5-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[5-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-5-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-5-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps0-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps0-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_field_access_with_shape",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_array_core.py::test_setitem_mixed_d"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_stack_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_roll[None-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]",
"dask/array/tests/test_array_core.py::test_reshape_exceptions",
"dask/array/tests/test_array_core.py::test_reshape_splat",
"dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_map_blocks_with_chunks",
"dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_regions",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_itemsize",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_array_slicing_results_in_ndarray",
"dask/array/tests/test_array_core.py::test_asarray",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_digitize",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_atop_new_axes",
"dask/array/tests/test_array_core.py::test_atop_kwargs",
"dask/array/tests/test_array_core.py::test_atop_chunks",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy_mutate",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_array_picklable",
"dask/array/tests/test_array_core.py::test_swapaxes",
"dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks",
"dask/array/tests/test_array_core.py::test_concatenate_axes",
"dask/array/tests/test_array_core.py::test_atop_concatenate",
"dask/array/tests/test_array_core.py::test_common_blockdim",
"dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly",
"dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks",
"dask/array/tests/test_array_core.py::test_uneven_chunks_atop",
"dask/array/tests/test_array_core.py::test_warn_bad_rechunking",
"dask/array/tests/test_array_core.py::test_optimize_fuse_keys",
"dask/array/tests/test_array_core.py::test_round",
"dask/array/tests/test_array_core.py::test_repeat",
"dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn",
"dask/array/tests/test_array_core.py::test_map_blocks_delayed",
"dask/array/tests/test_array_core.py::test_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_2d",
"dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks",
"dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_1d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_2d",
"dask/array/tests/test_array_core.py::test_setitem_1d",
"dask/array/tests/test_array_core.py::test_setitem_2d",
"dask/array/tests/test_array_core.py::test_setitem_errs",
"dask/array/tests/test_array_core.py::test_zero_slice_dtypes",
"dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk",
"dask/array/tests/test_array_core.py::test_atop_zero_shape",
"dask/array/tests/test_array_core.py::test_atop_zero_shape_new_axes",
"dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape",
"dask/array/tests/test_array_core.py::test_fast_from_array",
"dask/array/tests/test_array_core.py::test_random_from_array",
"dask/array/tests/test_array_core.py::test_concatenate_errs",
"dask/array/tests/test_array_core.py::test_stack_errs",
"dask/array/tests/test_array_core.py::test_transpose_negative_axes",
"dask/array/tests/test_array_core.py::test_atop_with_numpy_arrays",
"dask/tests/test_threaded.py::test_get",
"dask/tests/test_threaded.py::test_nested_get",
"dask/tests/test_threaded.py::test_get_without_computation",
"dask/tests/test_threaded.py::test_exceptions_rise_to_top",
"dask/tests/test_threaded.py::test_reuse_pool",
"dask/tests/test_threaded.py::test_threaded_within_thread",
"dask/tests/test_threaded.py::test_dont_spawn_too_many_threads",
"dask/tests/test_threaded.py::test_thread_safety"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,131 | [
"docs/source/array-api.rst",
"docs/source/debugging.rst",
"docs/source/index.rst",
".travis.yml",
"dask/array/core.py",
"dask/array/__init__.py",
"appveyor.yml",
"dask/async.py"
]
| [
"docs/source/array-api.rst",
"docs/source/debugging.rst",
"docs/source/index.rst",
".travis.yml",
"dask/array/core.py",
"dask/array/__init__.py",
"appveyor.yml",
"dask/async.py"
]
|
Azure__WALinuxAgent-638 | 7676bf85131673d4a02dcc43b18cdf24bb3a36ab | 2017-03-30 23:31:41 | 6e9b985c1d7d564253a1c344bab01b45093103cd | diff --git a/azurelinuxagent/common/osutil/factory.py b/azurelinuxagent/common/osutil/factory.py
index acd7f6ed..eee9f97a 100644
--- a/azurelinuxagent/common/osutil/factory.py
+++ b/azurelinuxagent/common/osutil/factory.py
@@ -76,6 +76,9 @@ def get_osutil(distro_name=DISTRO_NAME,
else:
return RedhatOSUtil()
+ elif distro_name == "euleros":
+ return RedhatOSUtil()
+
elif distro_name == "freebsd":
return FreeBSDOSUtil()
diff --git a/azurelinuxagent/common/protocol/hostplugin.py b/azurelinuxagent/common/protocol/hostplugin.py
index f4ece298..d76326d4 100644
--- a/azurelinuxagent/common/protocol/hostplugin.py
+++ b/azurelinuxagent/common/protocol/hostplugin.py
@@ -38,6 +38,8 @@ MAXIMUM_PAGEBLOB_PAGE_SIZE = 4 * 1024 * 1024 # Max page size: 4MB
class HostPluginProtocol(object):
+ _is_default_channel = False
+
def __init__(self, endpoint, container_id, role_config_name):
if endpoint is None:
raise ProtocolError("HostGAPlugin: Endpoint not provided")
@@ -50,6 +52,14 @@ class HostPluginProtocol(object):
self.role_config_name = role_config_name
self.manifest_uri = None
+ @staticmethod
+ def is_default_channel():
+ return HostPluginProtocol._is_default_channel
+
+ @staticmethod
+ def set_default_channel(is_default):
+ HostPluginProtocol._is_default_channel = is_default
+
def ensure_initialized(self):
if not self.is_initialized:
self.api_versions = self.get_api_versions()
@@ -74,7 +84,7 @@ class HostPluginProtocol(object):
if response.status != httpclient.OK:
logger.error(
"HostGAPlugin: Failed Get API versions: {0}".format(
- self._read_response_error(response)))
+ self.read_response_error(response)))
else:
return_val = ustr(remove_bom(response.read()), encoding='utf-8')
@@ -165,7 +175,7 @@ class HostPluginProtocol(object):
except Exception as e:
logger.error("HostGAPlugin: Exception Put VM status: {0}", e)
-
+
def _put_block_blob_status(self, sas_url, status_blob):
url = URI_FORMAT_PUT_VM_STATUS.format(self.endpoint, HOST_PLUGIN_PORT)
@@ -178,7 +188,7 @@ class HostPluginProtocol(object):
if response.status != httpclient.OK:
raise HttpError("HostGAPlugin: Put BlockBlob failed: {0}".format(
- self._read_response_error(response)))
+ self.read_response_error(response)))
else:
logger.verbose("HostGAPlugin: Put BlockBlob status succeeded")
@@ -200,7 +210,7 @@ class HostPluginProtocol(object):
if response.status != httpclient.OK:
raise HttpError(
"HostGAPlugin: Failed PageBlob clean-up: {0}".format(
- self._read_response_error(response)))
+ self.read_response_error(response)))
else:
logger.verbose("HostGAPlugin: PageBlob clean-up succeeded")
@@ -231,7 +241,7 @@ class HostPluginProtocol(object):
raise HttpError(
"HostGAPlugin Error: Put PageBlob bytes [{0},{1}]: " \
"{2}".format(
- start, end, self._read_response_error(response)))
+ start, end, self.read_response_error(response)))
# Advance to the next page (if any)
start = end
@@ -266,9 +276,12 @@ class HostPluginProtocol(object):
return s.decode('utf-8')
return s
- def _read_response_error(self, response):
+ @staticmethod
+ def read_response_error(response):
+ if response is None:
+ return ''
body = remove_bom(response.read())
- if PY_VERSION_MAJOR < 3:
+ if PY_VERSION_MAJOR < 3 and body is not None:
body = ustr(body, encoding='utf-8')
return "{0}, {1}, {2}".format(
response.status,
diff --git a/azurelinuxagent/common/protocol/wire.py b/azurelinuxagent/common/protocol/wire.py
index db343909..051dfa1b 100644
--- a/azurelinuxagent/common/protocol/wire.py
+++ b/azurelinuxagent/common/protocol/wire.py
@@ -370,11 +370,14 @@ class StatusBlob(object):
__storage_version__ = "2014-02-14"
+ def prepare(self, blob_type):
+ logger.verbose("Prepare status blob")
+ self.data = self.to_json()
+ self.type = blob_type
+
def upload(self, url):
# TODO upload extension only if content has changed
- logger.verbose("Upload status blob")
upload_successful = False
- self.data = self.to_json()
self.type = self.get_blob_type(url)
try:
if self.type == "BlockBlob":
@@ -635,9 +638,14 @@ class WireClient(object):
def fetch_manifest(self, version_uris):
logger.verbose("Fetch manifest")
for version in version_uris:
- response = self.fetch(version.uri)
+ response = None
+ if not HostPluginProtocol.is_default_channel():
+ response = self.fetch(version.uri)
if not response:
- logger.verbose("Manifest could not be downloaded, falling back to host plugin")
+ if HostPluginProtocol.is_default_channel():
+ logger.verbose("Using host plugin as default channel")
+ else:
+ logger.verbose("Manifest could not be downloaded, falling back to host plugin")
host = self.get_host_plugin()
uri, headers = host.get_artifact_request(version.uri)
response = self.fetch(uri, headers)
@@ -649,6 +657,9 @@ class WireClient(object):
else:
host.manifest_uri = version.uri
logger.verbose("Manifest downloaded successfully from host plugin")
+ if not HostPluginProtocol.is_default_channel():
+ logger.info("Setting host plugin as default channel")
+ HostPluginProtocol.set_default_channel(True)
if response:
return response
raise ProtocolError("Failed to fetch manifest from all sources")
@@ -664,12 +675,11 @@ class WireClient(object):
if resp.status == httpclient.OK:
return_value = self.decode_config(resp.read())
else:
- logger.warn("Could not fetch {0} [{1}: {2}]",
+ logger.warn("Could not fetch {0} [{1}]",
uri,
- resp.status,
- resp.reason)
+ HostPluginProtocol.read_response_error(resp))
except (HttpError, ProtocolError) as e:
- logger.verbose("Fetch failed from [{0}]", uri)
+ logger.verbose("Fetch failed from [{0}]: {1}", uri, e)
return return_value
def update_hosting_env(self, goal_state):
@@ -840,10 +850,12 @@ class WireClient(object):
if ext_conf.status_upload_blob is not None:
uploaded = False
try:
- uploaded = self.status_blob.upload(ext_conf.status_upload_blob)
- self.report_blob_type(self.status_blob.type,
- ext_conf.status_upload_blob_type)
- except (HttpError, ProtocolError) as e:
+ self.status_blob.prepare(ext_conf.status_upload_blob_type)
+ if not HostPluginProtocol.is_default_channel():
+ uploaded = self.status_blob.upload(ext_conf.status_upload_blob)
+ self.report_blob_type(self.status_blob.type,
+ ext_conf.status_upload_blob_type)
+ except (HttpError, ProtocolError):
# errors have already been logged
pass
if not uploaded:
@@ -851,6 +863,9 @@ class WireClient(object):
host.put_vm_status(self.status_blob,
ext_conf.status_upload_blob,
ext_conf.status_upload_blob_type)
+ if not HostPluginProtocol.is_default_channel():
+ logger.info("Setting host plugin as default channel")
+ HostPluginProtocol.set_default_channel(True)
"""
Emit an event to determine if the type in the extension config
diff --git a/azurelinuxagent/common/utils/restutil.py b/azurelinuxagent/common/utils/restutil.py
index 71973705..49d2d68f 100644
--- a/azurelinuxagent/common/utils/restutil.py
+++ b/azurelinuxagent/common/utils/restutil.py
@@ -28,7 +28,7 @@ from azurelinuxagent.common.future import httpclient, urlparse
REST api util functions
"""
-RETRY_WAITING_INTERVAL = 10
+RETRY_WAITING_INTERVAL = 3
secure_warning = True
diff --git a/azurelinuxagent/common/version.py b/azurelinuxagent/common/version.py
index 0e3818aa..8ee2c18f 100644
--- a/azurelinuxagent/common/version.py
+++ b/azurelinuxagent/common/version.py
@@ -76,6 +76,9 @@ def get_distro():
osinfo[2] = "oracle"
osinfo[3] = "Oracle Linux"
+ if os.path.exists("/etc/euleros-release"):
+ osinfo[0] = "euleros"
+
# The platform.py lib has issue with detecting BIG-IP linux distribution.
# Merge the following patch provided by F5.
if os.path.exists("/shared/vadc"):
diff --git a/azurelinuxagent/daemon/resourcedisk/default.py b/azurelinuxagent/daemon/resourcedisk/default.py
index 21de38f0..2b116fb1 100644
--- a/azurelinuxagent/daemon/resourcedisk/default.py
+++ b/azurelinuxagent/daemon/resourcedisk/default.py
@@ -85,6 +85,11 @@ class ResourceDiskHandler(object):
except ResourceDiskError as e:
logger.error("Failed to enable swap {0}", e)
+ def reread_partition_table(self, device):
+ if shellutil.run("sfdisk -R {0}".format(device), chk_err=False):
+ shellutil.run("blockdev --rereadpt {0}".format(device),
+ chk_err=False)
+
def mount_resource_disk(self, mount_point):
device = self.osutil.device_for_ide_port(1)
if device is None:
@@ -138,12 +143,13 @@ class ResourceDiskHandler(object):
shellutil.run(mkfs_string)
else:
logger.info("GPT not detected, determining filesystem")
- ret = self.change_partition_type(suppress_message=True, option_str="{0} 1".format(device))
+ ret = self.change_partition_type(suppress_message=True, option_str="{0} 1 -n".format(device))
ptype = ret[1].strip()
if ptype == "7" and self.fs != "ntfs":
logger.info("The partition is formatted with ntfs, updating "
"partition type to 83")
self.change_partition_type(suppress_message=False, option_str="{0} 1 83".format(device))
+ self.reread_partition_table(device)
logger.info("Format partition [{0}]", mkfs_string)
shellutil.run(mkfs_string)
else:
@@ -166,8 +172,8 @@ class ResourceDiskHandler(object):
logger.info("Mount resource disk [{0}]", mount_string)
ret, output = shellutil.run_get_output(mount_string, chk_err=False)
- # if the exit code is 32, then the resource disk is already mounted
- if ret == 32:
+ # if the exit code is 32, then the resource disk can be already mounted
+ if ret == 32 and output.find("is already mounted") != -1:
logger.warn("Could not mount resource disk: {0}", output)
elif ret != 0:
# Some kernels seem to issue an async partition re-read after a
@@ -178,9 +184,7 @@ class ResourceDiskHandler(object):
logger.warn("Failed to mount resource disk. "
"Retry mounting after re-reading partition info.")
- if shellutil.run("sfdisk -R {0}".format(device), chk_err=False):
- shellutil.run("blockdev --rereadpt {0}".format(device),
- chk_err=False)
+ self.reread_partition_table(device)
ret, output = shellutil.run_get_output(mount_string)
if ret:
diff --git a/azurelinuxagent/ga/update.py b/azurelinuxagent/ga/update.py
index 59bc70c5..a5cbfdbf 100644
--- a/azurelinuxagent/ga/update.py
+++ b/azurelinuxagent/ga/update.py
@@ -40,6 +40,7 @@ from azurelinuxagent.common.exception import UpdateError, ProtocolError
from azurelinuxagent.common.future import ustr
from azurelinuxagent.common.osutil import get_osutil
from azurelinuxagent.common.protocol import get_protocol_util
+from azurelinuxagent.common.protocol.hostplugin import HostPluginProtocol
from azurelinuxagent.common.utils.flexible_version import FlexibleVersion
from azurelinuxagent.common.version import AGENT_NAME, AGENT_VERSION, AGENT_LONG_VERSION, \
AGENT_DIR_GLOB, AGENT_PKG_GLOB, \
@@ -49,7 +50,6 @@ from azurelinuxagent.common.version import AGENT_NAME, AGENT_VERSION, AGENT_LONG
from azurelinuxagent.ga.exthandlers import HandlerManifest
-
AGENT_ERROR_FILE = "error.json" # File name for agent error record
AGENT_MANIFEST_FILE = "HandlerManifest.json"
@@ -698,18 +698,26 @@ class GuestAgent(object):
def _download(self):
for uri in self.pkg.uris:
- if self._fetch(uri.uri):
+ if not HostPluginProtocol.is_default_channel() and self._fetch(uri.uri):
break
- else:
- if self.host is not None and self.host.ensure_initialized():
+ elif self.host is not None and self.host.ensure_initialized():
+ if not HostPluginProtocol.is_default_channel():
logger.warn("Download unsuccessful, falling back to host plugin")
- uri, headers = self.host.get_artifact_request(uri.uri, self.host.manifest_uri)
- if uri is not None \
- and headers is not None \
- and self._fetch(uri, headers=headers):
- break
else:
- logger.warn("Download unsuccessful, host plugin not available")
+ logger.verbose("Using host plugin as default channel")
+
+ uri, headers = self.host.get_artifact_request(uri.uri, self.host.manifest_uri)
+ if uri is not None \
+ and headers is not None \
+ and self._fetch(uri, headers=headers):
+ if not HostPluginProtocol.is_default_channel():
+ logger.verbose("Setting host plugin as default channel")
+ HostPluginProtocol.set_default_channel(True)
+ break
+ else:
+ logger.warn("Host plugin download unsuccessful")
+ else:
+ logger.error("No download channels available")
if not os.path.isfile(self.get_agent_pkg_path()):
msg = u"Unable to download Agent {0} from any URI".format(self.name)
@@ -732,6 +740,9 @@ class GuestAgent(object):
bytearray(package),
asbin=True)
logger.info(u"Agent {0} downloaded from {1}", self.name, uri)
+ else:
+ logger.verbose("Fetch was unsuccessful [{0}]",
+ HostPluginProtocol.read_response_error(resp))
except restutil.HttpError as http_error:
logger.verbose(u"Agent {0} download from {1} failed [{2}]",
self.name,
diff --git a/azurelinuxagent/pa/rdma/centos.py b/azurelinuxagent/pa/rdma/centos.py
index 214f9ead..942d225f 100644
--- a/azurelinuxagent/pa/rdma/centos.py
+++ b/azurelinuxagent/pa/rdma/centos.py
@@ -82,7 +82,7 @@ class CentOSRDMAHandler(RDMAHandler):
# Example match (pkg name, -, followed by 3 segments, fw_version and -):
# - pkg=microsoft-hyper-v-rdma-4.1.0.142-20160323.x86_64
# - fw_version=142
- pattern = '{0}-\d\.\d\.\d\.({1})-'.format(
+ pattern = '{0}-(\d+\.){3,}({1})-'.format(
self.rdma_user_mode_package_name, fw_version)
return re.match(pattern, pkg)
@@ -156,7 +156,7 @@ class CentOSRDMAHandler(RDMAHandler):
# Install kernel mode driver (kmod-microsoft-hyper-v-rdma-*)
kmod_pkg = self.get_file_by_pattern(
- pkgs, "%s-\d\.\d\.\d\.+(%s)-\d{8}\.x86_64.rpm" % (self.rdma_kernel_mode_package_name, fw_version))
+ pkgs, "%s-(\d+\.){3,}(%s)-\d{8}\.x86_64.rpm" % (self.rdma_kernel_mode_package_name, fw_version))
if not kmod_pkg:
raise Exception("RDMA kernel mode package not found")
kmod_pkg_path = os.path.join(pkg_dir, kmod_pkg)
@@ -165,7 +165,7 @@ class CentOSRDMAHandler(RDMAHandler):
# Install user mode driver (microsoft-hyper-v-rdma-*)
umod_pkg = self.get_file_by_pattern(
- pkgs, "%s-\d\.\d\.\d\.+(%s)-\d{8}\.x86_64.rpm" % (self.rdma_user_mode_package_name, fw_version))
+ pkgs, "%s-(\d+\.){3,}(%s)-\d{8}\.x86_64.rpm" % (self.rdma_user_mode_package_name, fw_version))
if not umod_pkg:
raise Exception("RDMA user mode package not found")
umod_pkg_path = os.path.join(pkg_dir, umod_pkg)
diff --git a/bin/waagent2.0 b/bin/waagent2.0
index 80af1c73..1a72ba73 100644
--- a/bin/waagent2.0
+++ b/bin/waagent2.0
@@ -1093,6 +1093,18 @@ class centosDistro(redhatDistro):
def __init__(self):
super(centosDistro,self).__init__()
+############################################################
+# eulerosDistro
+############################################################
+
+class eulerosDistro(redhatDistro):
+ """
+ EulerOS Distro concrete class
+ Put EulerOS specific behavior here...
+ """
+ def __init__(self):
+ super(eulerosDistro,self).__init__()
+
############################################################
# oracleDistro
############################################################
@@ -6016,6 +6028,8 @@ def DistInfo(fullname=0):
if 'linux_distribution' in dir(platform):
distinfo = list(platform.linux_distribution(full_distribution_name=fullname))
distinfo[0] = distinfo[0].strip() # remove trailing whitespace in distro name
+ if os.path.exists("/etc/euleros-release"):
+ distinfo[0] = "euleros"
return distinfo
else:
return platform.dist()
diff --git a/config/66-azure-storage.rules b/config/66-azure-storage.rules
index ab306288..5b2b799f 100644
--- a/config/66-azure-storage.rules
+++ b/config/66-azure-storage.rules
@@ -1,12 +1,22 @@
-ACTION!="add|change", GOTO="azure_end"
-SUBSYSTEM!="block", GOTO="azure_end"
-ATTRS{ID_VENDOR}!="Msft", GOTO="azure_end"
-ATTRS{ID_MODEL}!="Virtual_Disk", GOTO="azure_end"
+ACTION=="add|change", SUBSYSTEM=="block", ENV{ID_VENDOR}=="Msft", ENV{ID_MODEL}=="Virtual_Disk", GOTO="azure_disk"
+GOTO="azure_end"
+LABEL="azure_disk"
# Root has a GUID of 0000 as the second value
# The resource/resource has GUID of 0001 as the second value
ATTRS{device_id}=="?00000000-0000-*", ENV{fabric_name}="root", GOTO="azure_names"
ATTRS{device_id}=="?00000000-0001-*", ENV{fabric_name}="resource", GOTO="azure_names"
+# Wellknown SCSI controllers
+ATTRS{device_id}=="{f8b3781a-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi0", GOTO="azure_datadisk"
+ATTRS{device_id}=="{f8b3781b-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi1", GOTO="azure_datadisk"
+ATTRS{device_id}=="{f8b3781c-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi2", GOTO="azure_datadisk"
+ATTRS{device_id}=="{f8b3781d-1e82-4818-a1c3-63d806ec15bb}", ENV{fabric_scsi_controller}="scsi3", GOTO="azure_datadisk"
+GOTO="azure_end"
+
+# Retrieve LUN number for datadisks
+LABEL="azure_datadisk"
+ENV{DEVTYPE}=="partition", PROGRAM="/bin/sh -c 'readlink /sys/class/block/%k/../device|cut -d: -f4'", ENV{fabric_name}="$env{fabric_scsi_controller}/lun$result", GOTO="azure_names"
+PROGRAM="/bin/sh -c 'readlink /sys/class/block/%k/device|cut -d: -f4'", ENV{fabric_name}="$env{fabric_scsi_controller}/lun$result", GOTO="azure_names"
GOTO="azure_end"
# Create the symlinks
@@ -15,4 +25,3 @@ ENV{DEVTYPE}=="disk", SYMLINK+="disk/azure/$env{fabric_name}"
ENV{DEVTYPE}=="partition", SYMLINK+="disk/azure/$env{fabric_name}-part%n"
LABEL="azure_end"
-
| Host plugin fallback should be sticky
If we detect that the connection to storage is not available, we should keep this state and continue using the host plugin.
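For illustration only, here is a minimal sketch of the "sticky channel" pattern this asks for, mirroring the class-level `is_default_channel`/`set_default_channel` helpers that the patch above introduces. The names `HostPluginChannel`, `fetch`, `direct_download`, and `host_plugin_download` are placeholders for this example, not the agent's real API.

```python
class HostPluginChannel:
    """Remembers, process-wide, that the host plugin is the default channel."""

    _is_default_channel = False  # class-level flag, shared across instances ("sticky")

    @staticmethod
    def is_default_channel():
        return HostPluginChannel._is_default_channel

    @staticmethod
    def set_default_channel(is_default):
        HostPluginChannel._is_default_channel = is_default


def fetch(uri, direct_download, host_plugin_download):
    """Try the direct storage channel first; once it fails, stick to the host plugin."""
    if not HostPluginChannel.is_default_channel():
        data = direct_download(uri)
        if data is not None:
            return data
    # Direct channel unavailable (or already known to be): use the host plugin
    data = host_plugin_download(uri)
    if data is not None and not HostPluginChannel.is_default_channel():
        HostPluginChannel.set_default_channel(True)  # remember the fallback
    return data
```

Because the flag lives on the class rather than on an instance, later downloads in the same process skip the failing direct channel, which is the "sticky" behavior requested here.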
index 078af706..8ebb468c 100644
--- a/tests/protocol/test_hostplugin.py
+++ b/tests/protocol/test_hostplugin.py
@@ -113,21 +113,24 @@ class TestHostPlugin(AgentTestCase):
def test_fallback(self):
"""
- Validate fallback to upload status using HostGAPlugin is happening when status reporting via
- default method is unsuccessful
+ Validate fallback to upload status using HostGAPlugin is happening when
+ status reporting via default method is unsuccessful
"""
test_goal_state = wire.GoalState(WireProtocolData(DATA_FILE).goal_state)
-
+ status = restapi.VMStatus(status="Ready", message="Guest Agent is running")
with patch.object(wire.HostPluginProtocol, "put_vm_status") as patch_put:
with patch.object(wire.StatusBlob, "upload", return_value=False) as patch_upload:
wire_protocol_client = wire.WireProtocol(wireserver_url).client
wire_protocol_client.get_goal_state = Mock(return_value=test_goal_state)
wire_protocol_client.ext_conf = wire.ExtensionsConfig(None)
wire_protocol_client.ext_conf.status_upload_blob = sas_url
+ wire_protocol_client.status_blob.set_vm_status(status)
wire_protocol_client.upload_status_blob()
self.assertTrue(patch_put.call_count == 1,
"Fallback was not engaged")
self.assertTrue(patch_put.call_args[0][1] == sas_url)
+ self.assertTrue(wire.HostPluginProtocol.is_default_channel())
+ wire.HostPluginProtocol.set_default_channel(False)
def test_validate_http_request(self):
"""Validate correct set of data is sent to HostGAPlugin when reporting VM status"""
@@ -161,19 +164,19 @@ class TestHostPlugin(AgentTestCase):
def test_no_fallback(self):
"""
- Validate fallback to upload status using HostGAPlugin is not happening when status reporting via
- default method is successful
+ Validate fallback to upload status using HostGAPlugin is not happening
+ when status reporting via default method is successful
"""
- with patch.object(wire.HostPluginProtocol,
- "put_vm_status") as patch_put:
+ vmstatus = restapi.VMStatus(message="Ready", status="Ready")
+ with patch.object(wire.HostPluginProtocol, "put_vm_status") as patch_put:
with patch.object(wire.StatusBlob, "upload") as patch_upload:
patch_upload.return_value = True
wire_protocol_client = wire.WireProtocol(wireserver_url).client
wire_protocol_client.ext_conf = wire.ExtensionsConfig(None)
wire_protocol_client.ext_conf.status_upload_blob = sas_url
+ wire_protocol_client.status_blob.vm_status = vmstatus
wire_protocol_client.upload_status_blob()
- self.assertTrue(patch_put.call_count == 0,
- "Fallback was engaged")
+ self.assertTrue(patch_put.call_count == 0, "Fallback was engaged")
def test_validate_block_blob(self):
"""Validate correct set of data is sent to HostGAPlugin when reporting VM status"""
diff --git a/tests/protocol/test_wire.py b/tests/protocol/test_wire.py
index e083678b..e36fc652 100644
--- a/tests/protocol/test_wire.py
+++ b/tests/protocol/test_wire.py
@@ -148,13 +148,15 @@ class TestWireProtocolGetters(AgentTestCase):
host_uri)
def test_upload_status_blob_default(self, *args):
+ vmstatus = VMStatus(message="Ready", status="Ready")
wire_protocol_client = WireProtocol(wireserver_url).client
wire_protocol_client.ext_conf = ExtensionsConfig(None)
wire_protocol_client.ext_conf.status_upload_blob = testurl
+ wire_protocol_client.status_blob.vm_status = vmstatus
with patch.object(WireClient, "get_goal_state") as patch_get_goal_state:
with patch.object(HostPluginProtocol, "put_vm_status") as patch_host_ga_plugin_upload:
- with patch.object(StatusBlob, "upload", return_value = True) as patch_default_upload:
+ with patch.object(StatusBlob, "upload", return_value=True) as patch_default_upload:
wire_protocol_client.upload_status_blob()
patch_default_upload.assert_called_once_with(testurl)
@@ -162,20 +164,24 @@ class TestWireProtocolGetters(AgentTestCase):
patch_host_ga_plugin_upload.assert_not_called()
def test_upload_status_blob_host_ga_plugin(self, *args):
+ vmstatus = VMStatus(message="Ready", status="Ready")
wire_protocol_client = WireProtocol(wireserver_url).client
wire_protocol_client.ext_conf = ExtensionsConfig(None)
wire_protocol_client.ext_conf.status_upload_blob = testurl
wire_protocol_client.ext_conf.status_upload_blob_type = testtype
+ wire_protocol_client.status_blob.vm_status = vmstatus
goal_state = GoalState(WireProtocolData(DATA_FILE).goal_state)
with patch.object(HostPluginProtocol, "put_vm_status") as patch_host_ga_plugin_upload:
with patch.object(StatusBlob, "upload", return_value=False) as patch_default_upload:
- wire_protocol_client.get_goal_state = Mock(return_value = goal_state)
+ wire_protocol_client.get_goal_state = Mock(return_value=goal_state)
wire_protocol_client.upload_status_blob()
patch_default_upload.assert_called_once_with(testurl)
wire_protocol_client.get_goal_state.assert_called_once()
patch_host_ga_plugin_upload.assert_called_once_with(wire_protocol_client.status_blob, testurl, testtype)
+ self.assertTrue(HostPluginProtocol.is_default_channel())
+ HostPluginProtocol.set_default_channel(False)
def test_get_in_vm_artifacts_profile_blob_not_available(self, *args):
wire_protocol_client = WireProtocol(wireserver_url).client
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 10
} | 2.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"pyasn1",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@7676bf85131673d4a02dcc43b18cdf24bb3a36ab#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- pyasn1==0.5.1
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_fallback",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_upload_status_blob_host_ga_plugin"
]
| [
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_getters",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_getters_ext_no_public",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_getters_ext_no_settings",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_getters_no_ext"
]
| [
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_no_fallback",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_block_blob",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_get_extension_artifacts",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_http_request",
"tests/protocol/test_hostplugin.py::TestHostPlugin::test_validate_page_blobs",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_call_storage_kwargs",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_download_ext_handler_pkg_fallback",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_fetch_manifest_fallback",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_get_host_ga_plugin",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_get_in_vm_artifacts_profile_blob_not_available",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_get_in_vm_artifacts_profile_default",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_get_in_vm_artifacts_profile_host_ga_plugin",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_get_in_vm_artifacts_profile_response_body_not_valid",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_report_vm_status",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_status_blob_parsing",
"tests/protocol/test_wire.py::TestWireProtocolGetters::test_upload_status_blob_default"
]
| []
| Apache License 2.0 | 1,132 | [
"azurelinuxagent/ga/update.py",
"azurelinuxagent/daemon/resourcedisk/default.py",
"azurelinuxagent/pa/rdma/centos.py",
"azurelinuxagent/common/protocol/hostplugin.py",
"config/66-azure-storage.rules",
"azurelinuxagent/common/protocol/wire.py",
"azurelinuxagent/common/osutil/factory.py",
"azurelinuxagent/common/utils/restutil.py",
"azurelinuxagent/common/version.py",
"bin/waagent2.0"
]
| [
"azurelinuxagent/ga/update.py",
"azurelinuxagent/daemon/resourcedisk/default.py",
"azurelinuxagent/pa/rdma/centos.py",
"azurelinuxagent/common/protocol/hostplugin.py",
"config/66-azure-storage.rules",
"azurelinuxagent/common/protocol/wire.py",
"azurelinuxagent/common/osutil/factory.py",
"azurelinuxagent/common/utils/restutil.py",
"azurelinuxagent/common/version.py",
"bin/waagent2.0"
]
|
|
pypa__pip-4393 | 985dd899e023fc50f18def2ff6991a25d91ebef8 | 2017-03-31 01:31:40 | 46e41dec1f30e2576a0a10abec18fb1a2150188d | diff --git a/news/3236.bugfix b/news/3236.bugfix
new file mode 100644
index 000000000..51d358458
--- /dev/null
+++ b/news/3236.bugfix
@@ -0,0 +1,1 @@
+"Support URL-encoded characters in URL credentials."
diff --git a/pip/download.py b/pip/download.py
index 41709db5e..894d0ff9a 100644
--- a/pip/download.py
+++ b/pip/download.py
@@ -21,6 +21,7 @@ except ImportError:
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
+from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
import pip
@@ -207,8 +208,9 @@ class MultiDomainBasicAuth(AuthBase):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
- return userinfo.split(":", 1)
- return userinfo, None
+ user, pwd = userinfo.split(":", 1)
+ return (urllib_unquote(user), urllib_unquote(pwd))
+ return urllib_unquote(userinfo), None
return None, None
| Failure to authenticate private repository when URL-encoded character in password
See https://github.com/pypa/pip/issues/51#issuecomment-128548720
I have this issue with pip 7.1.2. There's a symbol in the password that needs URL-encoding.
With the password URL-encoded, pip fails to authenticate (it gets a 401 response) and prompts for the username/password interactively. It then works fine if I enter the user/pass interactively. Like @bcbrown, I suspect pip is not URL-decoding the password.
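As a minimal sketch of the decoding step the report suggests is missing (and which the patch above adds via urllib's `unquote`): percent-encoded credentials taken from a URL's netloc have to be decoded before they are used for authentication. The helper name `split_credentials` is invented for this example; only `urllib.parse.unquote` is a real standard-library API.

```python
from urllib.parse import unquote

def split_credentials(netloc):
    """Split 'user:pass@host' into decoded (user, password).

    Percent-encoded reserved characters (e.g. ':' as %3A, '@' as %40)
    are decoded so the credentials match what the server expects.
    """
    if "@" not in netloc:
        return None, None
    userinfo = netloc.rsplit("@", 1)[0]
    if ":" in userinfo:
        user, password = userinfo.split(":", 1)
        return unquote(user), unquote(password)
    return unquote(userinfo), None

# URL-encoded reserved characters round-trip correctly:
assert split_credentials("foo:[email protected]") == ("foo", "bar")
assert split_credentials("user%3Aname:%23%40%[email protected]") == ("user:name", "#@^")
```

Without the `unquote` calls, the literal string `%23%40%5E` would be sent as the password, which explains the 401 response described above.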
| pypa/pip | diff --git a/tests/unit/test_download.py b/tests/unit/test_download.py
index ee4b11c7d..1630393f0 100644
--- a/tests/unit/test_download.py
+++ b/tests/unit/test_download.py
@@ -13,7 +13,7 @@ import pip
from pip.exceptions import HashMismatch
from pip.download import (
PipSession, SafeFileCache, path_to_url, unpack_http_url, url_to_path,
- unpack_file_url,
+ unpack_file_url, MultiDomainBasicAuth
)
from pip.index import Link
from pip.utils.hashes import Hashes
@@ -333,3 +333,14 @@ class TestPipSession:
)
assert not hasattr(session.adapters["https://example.com/"], "cache")
+
+
+def test_parse_credentials():
+ auth = MultiDomainBasicAuth()
+ assert auth.parse_credentials("foo:[email protected]") == ('foo', 'bar')
+ assert auth.parse_credentials("[email protected]") == ('foo', None)
+ assert auth.parse_credentials("example.com") == (None, None)
+
+ # URL-encoded reserved characters:
+ assert auth.parse_credentials("user%3Aname:%23%40%[email protected]") \
+ == ("user:name", "#@^")
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 9.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-rerunfailures",
"pytest-timeout",
"pytest-xdist",
"freezegun",
"mock",
"pretend",
"pyyaml",
"setuptools>=39.2.0",
"scripttest",
"virtualenv",
"wheel"
],
"pre_install": null,
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | coverage==7.8.0
distlib==0.3.9
exceptiongroup==1.2.0
execnet==2.1.1
filelock==3.18.0
freezegun==1.5.1
iniconfig==1.1.1
mock==5.2.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pretend==1.0.9
pytest==8.3.4
pytest-cov==6.0.0
pytest-rerunfailures==15.0
pytest-timeout==2.3.1
pytest-xdist==3.6.1
python-dateutil==2.9.0.post0
PyYAML==6.0.2
scripttest==2.0
six==1.17.0
tomli==2.0.1
virtualenv==20.29.3
| name: pip
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- exceptiongroup=1.2.0=py39h06a4308_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- packaging=24.2=py39h06a4308_0
- pluggy=1.5.0=py39h06a4308_0
- pytest=8.3.4=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tomli=2.0.1=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- coverage==7.8.0
- distlib==0.3.9
- execnet==2.1.1
- filelock==3.18.0
- freezegun==1.5.1
- mock==5.2.0
- pip==10.0.0.dev0
- platformdirs==4.3.7
- pretend==1.0.9
- pytest-cov==6.0.0
- pytest-rerunfailures==15.0
- pytest-timeout==2.3.1
- pytest-xdist==3.6.1
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- scripttest==2.0
- six==1.17.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/pip
| [
"tests/unit/test_download.py::test_parse_credentials"
]
| []
| [
"tests/unit/test_download.py::test_unpack_http_url_with_urllib_response_without_content_type",
"tests/unit/test_download.py::test_user_agent",
"tests/unit/test_download.py::test_unpack_http_url_bad_downloaded_checksum",
"tests/unit/test_download.py::test_path_to_url_unix",
"tests/unit/test_download.py::test_url_to_path_unix",
"tests/unit/test_download.py::Test_unpack_file_url::test_unpack_file_url_no_download",
"tests/unit/test_download.py::Test_unpack_file_url::test_unpack_file_url_and_download",
"tests/unit/test_download.py::Test_unpack_file_url::test_unpack_file_url_download_already_exists",
"tests/unit/test_download.py::Test_unpack_file_url::test_unpack_file_url_bad_hash",
"tests/unit/test_download.py::Test_unpack_file_url::test_unpack_file_url_download_bad_hash",
"tests/unit/test_download.py::Test_unpack_file_url::test_unpack_file_url_thats_a_dir",
"tests/unit/test_download.py::TestSafeFileCache::test_cache_roundtrip",
"tests/unit/test_download.py::TestSafeFileCache::test_safe_get_no_perms",
"tests/unit/test_download.py::TestSafeFileCache::test_safe_set_no_perms",
"tests/unit/test_download.py::TestSafeFileCache::test_safe_delete_no_perms",
"tests/unit/test_download.py::TestPipSession::test_cache_defaults_off",
"tests/unit/test_download.py::TestPipSession::test_cache_is_enabled",
"tests/unit/test_download.py::TestPipSession::test_http_cache_is_not_enabled",
"tests/unit/test_download.py::TestPipSession::test_insecure_host_cache_is_not_enabled"
]
| []
| MIT License | 1,133 | [
"news/3236.bugfix",
"pip/download.py"
]
| [
"news/3236.bugfix",
"pip/download.py"
]
|
|
zhmcclient__python-zhmcclient-229 | c829a05248bcbb05c23c50ecde4a1ca33010b9d3 | 2017-03-31 12:18:47 | 63bfc356570b865f4eac1d6a37c62e7b018520fc | coveralls:
[Coverage Status](https://coveralls.io/builds/10867587)
Coverage increased (+0.01%) to 88.898% when pulling **1bac9182e06c1152388d9c04aee56487528676ed on andy/fix-job-error** into **c6e1335c4e1b93540dae6494b98e82f1514f93b4 on master**.
| diff --git a/docs/changes.rst b/docs/changes.rst
index 3c7bbd6..3075dd5 100755
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -48,6 +48,8 @@ Released: not yet
* Improved robustness of timestats tests by measuring the actual sleep time
instead of going by the requested sleep time.
+* Added support for 'error' field in 'job-results' (fixes issue #228).
+
**Enhancements:**
* Improved the mock support by adding the typical attributes of its superclass
diff --git a/zhmcclient/_session.py b/zhmcclient/_session.py
index 14b80fa..81ceb9b 100644
--- a/zhmcclient/_session.py
+++ b/zhmcclient/_session.py
@@ -824,7 +824,7 @@ class Session(object):
elif result.status_code == 202:
result_object = _result_object(result)
job_uri = result_object['job-uri']
- job = Job(self, job_uri)
+ job = Job(self, job_uri, 'POST', uri)
if wait_for_completion:
return job.wait_for_completion(operation_timeout)
else:
@@ -954,7 +954,7 @@ class Job(object):
completion.
"""
- def __init__(self, session, uri):
+ def __init__(self, session, uri, op_method, op_uri):
"""
Parameters:
@@ -967,9 +967,25 @@ class Job(object):
Must not be `None`.
Example: ``"/api/jobs/{job-id}"``
+
+ op_method (:term:`string`):
+ Name of the HTTP method of the operation that is executing
+ asynchronously on the HMC.
+ Must not be `None`.
+
+ Example: ``"POST"``
+
+ op_uri (:term:`string`):
+ Canonical URI of the operation that is executing asynchronously on
+ the HMC.
+ Must not be `None`.
+
+ Example: ``"/api/partitions/{partition-id}/stop"``
"""
self._session = session
self._uri = uri
+ self._op_method = op_method
+ self._op_uri = op_uri
@property
def session(self):
@@ -987,6 +1003,26 @@ class Job(object):
"""
return self._uri
+ @property
+ def op_method(self):
+ """
+ :term:`string`: Name of the HTTP method of the operation that is
+ executing asynchronously on the HMC.
+
+ Example: ``"POST"``
+ """
+ return self._op_method
+
+ @property
+ def op_uri(self):
+ """
+ :term:`string`: Canonical URI of the operation that is executing
+ asynchronously on the HMC.
+
+ Example: ``"/api/partitions/{partition-id}/stop"``
+ """
+ return self._op_uri
+
@logged_api_call
def check_for_completion(self):
"""
@@ -1027,25 +1063,33 @@ class Job(object):
job_status = job_result_obj['status']
if job_status == 'complete':
self.session.delete(self.uri)
- oper_status_code = job_result_obj['job-status-code']
- if oper_status_code in (200, 201):
- oper_result_obj = job_result_obj.get('job-results', None)
- elif oper_status_code == 204:
+ op_status_code = job_result_obj['job-status-code']
+ if op_status_code in (200, 201):
+ op_result_obj = job_result_obj.get('job-results', None)
+ elif op_status_code == 204:
# No content
- oper_result_obj = None
+ op_result_obj = None
else:
error_result_obj = job_result_obj.get('job-results', None)
- message = error_result_obj.get('message', None) \
- if error_result_obj else None
+ if not error_result_obj:
+ message = None
+ elif 'message' in error_result_obj:
+ message = error_result_obj['message']
+ elif 'error' in error_result_obj:
+ message = error_result_obj['error']
+ else:
+ message = None
error_obj = {
- 'http-status': oper_status_code,
+ 'http-status': op_status_code,
'reason': job_result_obj['job-reason-code'],
'message': message,
+ 'request-method': self.op_method,
+ 'request-uri': self.op_uri,
}
raise HTTPError(error_obj)
else:
- oper_result_obj = None
- return job_status, oper_result_obj
+ op_result_obj = None
+ return job_status, op_result_obj
@logged_api_call
def wait_for_completion(self, operation_timeout=None):
@@ -1098,13 +1142,13 @@ class Job(object):
start_time = time.time()
while True:
- job_status, oper_result_obj = self.check_for_completion()
+ job_status, op_result_obj = self.check_for_completion()
# We give completion of status priority over strictly achieving
# the timeout, so we check status first. This may cause a longer
# duration of the method than prescribed by the timeout.
if job_status == 'complete':
- return oper_result_obj
+ return op_result_obj
if operation_timeout > 0:
current_time = time.time()
| Starting a paused partition fails
### Actual behavior
This is the expected behavior of DPM (confirmed) - but it's really strange. For nova-dpm we need to fix it - the question is whether we fix it in zhmcclient or in nova-dpm.
- do an in-band shutdown of a partition (e.g. via the Linux shell: shutdown -P now)
- the partition state switches to "Paused"
- try to start that partition using "Daily - Start"
This results in the following error on the UI
"STS00005E
One or more targeted partitions are already started. Only partitions in the Stopped state can be started.
To restart partitions, first run the Stop task, and then try the operation again."
The cli does not care about this issue at all, it just leaves the partition state to paused:
$ zhmc partition list P000S67B
/+-------------------------------------------------------------+------------+
| name | status |
|-------------------------------------------------------------+------------|
| andreas_cloud_init_tests | paused |
$ zhmc partition start P000S67B andreas_cloud_init_tests
|Partition andreas_cloud_init_tests has been started.
$ zhmc partition list P000S67B
/+-------------------------------------------------------------+------------+
| name | status |
|-------------------------------------------------------------+------------|
| andreas_cloud_init_tests | paused |
The Python binding seems to behave like the CLI: it returns immediately even with `wait_for_completion=True`, and there's no indication that the operation failed.
### Expected behavior
### Execution environment
* zhmcclient version: 0.9.0
* Operating system (type+version):
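For reference, a minimal sketch of how the failure would be expected to surface through the Python binding once the job error is reported - the `Client`/`Cpc`/`Partition` calls, host and credentials below are assumptions based on the usual zhmcclient object model, and the CPC/partition names are taken from the transcript above; this is a sketch, not a verified reproducer:
```python
# Sketch only: the object-model calls (Client, cpcs.find, partitions.find,
# Partition.start) plus host/credentials are assumed; names come from the
# transcript above.
import zhmcclient

session = zhmcclient.Session('hmc-host', 'user', 'password')
client = zhmcclient.Client(session)
cpc = client.cpcs.find(name='P000S67B')
partition = cpc.partitions.find(name='andreas_cloud_init_tests')

try:
    partition.start(wait_for_completion=True)
except zhmcclient.HTTPError as exc:
    # With the patch in this record, the message is filled from the job's
    # 'error' field when 'message' is absent, and the failing request's
    # method and URI are added to the error data.
    print(exc.http_status, exc.reason, exc.message)
```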
| zhmcclient/python-zhmcclient | diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 2f5dba2..64102b4 100755
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -291,27 +291,37 @@ class JobTests(unittest.TestCase):
"""Test initialization of Job object."""
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
+ # Jobs exist only for POST, but we want to test that the specified HTTP
+ # method comes back regardless:
+ op_method = 'GET'
+
+ op_uri = '/api/bla'
+
+ job = Job(session, self.job_uri, op_method, op_uri)
self.assertEqual(job.uri, self.job_uri)
self.assertEqual(job.session, session)
+ self.assertEqual(job.op_method, op_method)
+ self.assertEqual(job.op_uri, op_uri)
def test_check_incomplete(self):
"""Test check_for_completion() with incomplete job."""
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
query_job_status_result = {
'status': 'running',
}
m.get(self.job_uri, json=query_job_status_result)
m.delete(self.job_uri)
- job_status, oper_result = job.check_for_completion()
+ job_status, op_result = job.check_for_completion()
self.assertEqual(job_status, 'running')
- self.assertIsNone(oper_result)
+ self.assertIsNone(op_result)
def test_check_complete_success_noresult(self):
"""Test check_for_completion() with successful complete job without
@@ -319,7 +329,9 @@ class JobTests(unittest.TestCase):
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
query_job_status_result = {
'status': 'complete',
'job-status-code': 200,
@@ -329,10 +341,10 @@ class JobTests(unittest.TestCase):
m.get(self.job_uri, json=query_job_status_result)
m.delete(self.job_uri)
- job_status, oper_result = job.check_for_completion()
+ job_status, op_result = job.check_for_completion()
self.assertEqual(job_status, 'complete')
- self.assertIsNone(oper_result)
+ self.assertIsNone(op_result)
def test_check_complete_success_result(self):
"""Test check_for_completion() with successful complete job with a
@@ -340,48 +352,133 @@ class JobTests(unittest.TestCase):
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
- exp_oper_result = {
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
+ exp_op_result = {
'foo': 'bar',
}
query_job_status_result = {
'status': 'complete',
'job-status-code': 200,
# 'job-reason-code' omitted because HTTP status good
- 'job-results': exp_oper_result,
+ 'job-results': exp_op_result,
}
m.get(self.job_uri, json=query_job_status_result)
m.delete(self.job_uri)
- job_status, oper_result = job.check_for_completion()
+ job_status, op_result = job.check_for_completion()
self.assertEqual(job_status, 'complete')
- self.assertEqual(oper_result, exp_oper_result)
+ self.assertEqual(op_result, exp_op_result)
- def test_check_complete_error(self):
- """Test check_for_completion() with complete job in error."""
+ def test_check_complete_error1(self):
+ """Test check_for_completion() with complete job in error (1)."""
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
- error_result = {
- 'message': 'bla',
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
+ query_job_status_result = {
+ 'status': 'complete',
+ 'job-status-code': 500,
+ 'job-reason-code': 42,
+ # no 'job-results' field (it is not guaranteed to be there)
}
+
+ m.get(self.job_uri, json=query_job_status_result)
+ m.delete(self.job_uri)
+
+ with self.assertRaises(HTTPError) as cm:
+ job_status, op_result = job.check_for_completion()
+
+ self.assertEqual(cm.exception.http_status, 500)
+ self.assertEqual(cm.exception.reason, 42)
+ self.assertEqual(cm.exception.message, None)
+
+ def test_check_complete_error2(self):
+ """Test check_for_completion() with complete job in error (2)."""
+ with requests_mock.mock() as m:
+ self.mock_server_1(m)
+ session = Session('fake-host', 'fake-user', 'fake-pw')
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
query_job_status_result = {
'status': 'complete',
'job-status-code': 500,
'job-reason-code': 42,
- 'job-results': error_result,
+ 'job-results': {}, # it is not guaranteed to have any content
}
+
+ m.get(self.job_uri, json=query_job_status_result)
+ m.delete(self.job_uri)
+
+ with self.assertRaises(HTTPError) as cm:
+ job_status, op_result = job.check_for_completion()
+
+ self.assertEqual(cm.exception.http_status, 500)
+ self.assertEqual(cm.exception.reason, 42)
+ self.assertEqual(cm.exception.message, None)
+
+ def test_check_complete_error3(self):
+ """Test check_for_completion() with complete job in error (3)."""
+ with requests_mock.mock() as m:
+ self.mock_server_1(m)
+ session = Session('fake-host', 'fake-user', 'fake-pw')
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
+ query_job_status_result = {
+ 'status': 'complete',
+ 'job-status-code': 500,
+ 'job-reason-code': 42,
+ 'job-results': {
+ # Content is not documented for the error case.
+ # Some failures result in an 'error' field.
+ 'error': 'bla message',
+ },
+ }
+
+ m.get(self.job_uri, json=query_job_status_result)
+ m.delete(self.job_uri)
+
+ with self.assertRaises(HTTPError) as cm:
+ job_status, op_result = job.check_for_completion()
+
+ self.assertEqual(cm.exception.http_status, 500)
+ self.assertEqual(cm.exception.reason, 42)
+ self.assertEqual(cm.exception.message, 'bla message')
+
+ def test_check_complete_error4(self):
+ """Test check_for_completion() with complete job in error (4)."""
+ with requests_mock.mock() as m:
+ self.mock_server_1(m)
+ session = Session('fake-host', 'fake-user', 'fake-pw')
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
+ query_job_status_result = {
+ 'status': 'complete',
+ 'job-status-code': 500,
+ 'job-reason-code': 42,
+ 'job-results': {
+ # Content is not documented for the error case.
+ # Some failures result in an 'message' field.
+ 'message': 'bla message',
+ },
+ }
+
m.get(self.job_uri, json=query_job_status_result)
m.delete(self.job_uri)
with self.assertRaises(HTTPError) as cm:
- job_status, oper_result = job.check_for_completion()
+ job_status, op_result = job.check_for_completion()
self.assertEqual(cm.exception.http_status, 500)
self.assertEqual(cm.exception.reason, 42)
- self.assertEqual(cm.exception.message, 'bla')
+ self.assertEqual(cm.exception.message, 'bla message')
def test_wait_complete1_success_result(self):
"""Test wait_for_completion() with successful complete job with a
@@ -389,22 +486,24 @@ class JobTests(unittest.TestCase):
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
- exp_oper_result = {
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
+ exp_op_result = {
'foo': 'bar',
}
query_job_status_result = {
'status': 'complete',
'job-status-code': 200,
# 'job-reason-code' omitted because HTTP status good
- 'job-results': exp_oper_result,
+ 'job-results': exp_op_result,
}
m.get(self.job_uri, json=query_job_status_result)
m.delete(self.job_uri)
- oper_result = job.wait_for_completion()
+ op_result = job.wait_for_completion()
- self.assertEqual(oper_result, exp_oper_result)
+ self.assertEqual(op_result, exp_op_result)
def test_wait_complete3_success_result(self):
"""Test wait_for_completion() with successful complete job with a
@@ -412,8 +511,10 @@ class JobTests(unittest.TestCase):
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
- exp_oper_result = {
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
+ exp_op_result = {
'foo': 'bar',
}
m.get(self.job_uri,
@@ -423,16 +524,18 @@ class JobTests(unittest.TestCase):
])
m.delete(self.job_uri)
- oper_result = job.wait_for_completion()
+ op_result = job.wait_for_completion()
- self.assertEqual(oper_result, exp_oper_result)
+ self.assertEqual(op_result, exp_op_result)
def test_wait_complete3_timeout(self):
"""Test wait_for_completion() with timeout."""
with requests_mock.mock() as m:
self.mock_server_1(m)
session = Session('fake-host', 'fake-user', 'fake-pw')
- job = Job(session, self.job_uri)
+ op_method = 'POST'
+ op_uri = '/api/foo'
+ job = Job(session, self.job_uri, op_method, op_uri)
m.get(self.job_uri,
[
{'text': result_running_callback},
@@ -475,14 +578,14 @@ def result_running_callback(request, context):
def result_complete_callback(request, context):
- exp_oper_result = {
+ exp_op_result = {
'foo': 'bar',
}
job_result_complete = {
'status': 'complete',
'job-status-code': 200,
# 'job-reason-code' omitted because HTTP status good
- 'job-results': exp_oper_result,
+ 'job-results': exp_op_result,
}
time.sleep(1)
return json.dumps(job_result_complete)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks",
"has_pytest_match_arg"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 2
} | 0.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-mock pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc build-essential"
],
"python": "3.6",
"reqs_path": [
"requirements.txt",
"dev-requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
anyio==3.6.2
argon2-cffi==21.3.0
argon2-cffi-bindings==21.2.0
async-generator==1.10
attrs==22.2.0
Babel==2.11.0
backcall==0.2.0
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
click==8.0.4
click-repl==0.3.0
click-spinner==0.1.10
colorama==0.4.5
comm==0.1.4
contextvars==2.4
coverage==6.2
cryptography==40.0.2
dataclasses==0.8
decorator==5.1.1
defusedxml==0.7.1
docopt==0.6.2
docutils==0.18.1
entrypoints==0.4
execnet==1.9.0
flake8==3.9.2
gitdb==4.0.9
GitPython==3.1.18
idna==3.10
imagesize==1.4.1
immutables==0.19
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
ipykernel==5.5.6
ipython==7.16.3
ipython-genutils==0.2.0
ipywidgets==7.8.5
jedi==0.17.2
jeepney==0.7.1
Jinja2==3.0.3
json5==0.9.16
jsonschema==3.2.0
jupyter==1.1.1
jupyter-client==7.1.2
jupyter-console==6.4.3
jupyter-core==4.9.2
jupyter-server==1.13.1
jupyterlab==3.2.9
jupyterlab-pygments==0.1.2
jupyterlab-server==2.10.3
jupyterlab_widgets==1.1.11
keyring==23.4.1
MarkupSafe==2.0.1
mccabe==0.6.1
mistune==0.8.4
mock==5.2.0
nbclassic==0.3.5
nbclient==0.5.9
nbconvert==6.0.7
nbformat==5.1.3
nest-asyncio==1.6.0
notebook==6.4.10
packaging==21.3
pandocfilters==1.5.1
parso==0.7.1
pbr==6.1.1
pexpect==4.9.0
pickleshare==0.7.5
pkginfo==1.10.0
pluggy==1.0.0
progressbar2==3.55.0
prometheus-client==0.17.1
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
pycodestyle==2.7.0
pycparser==2.21
pyflakes==2.3.1
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
pytest-mock==3.6.1
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
python-utils==3.5.2
pytz==2025.2
PyYAML==6.0.1
pyzmq==25.1.2
readme-renderer==34.0
requests==2.27.1
requests-mock==1.12.1
requests-toolbelt==1.0.0
rfc3986==1.5.0
SecretStorage==3.3.3
Send2Trash==1.8.3
six==1.17.0
smmap==5.0.0
sniffio==1.2.0
snowballstemmer==2.2.0
Sphinx==5.3.0
sphinx-git==11.0.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
stomp.py==8.1.0
tabulate==0.8.10
terminado==0.12.1
testfixtures==7.2.2
testpath==0.6.0
tomli==1.2.3
tornado==6.1
tqdm==4.64.1
traitlets==4.3.3
twine==3.8.0
typing_extensions==4.1.1
urllib3==1.26.20
wcwidth==0.2.13
webencodings==0.5.1
websocket-client==1.3.1
widgetsnbextension==3.6.10
-e git+https://github.com/zhmcclient/python-zhmcclient.git@c829a05248bcbb05c23c50ecde4a1ca33010b9d3#egg=zhmcclient
zipp==3.6.0
| name: python-zhmcclient
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- anyio==3.6.2
- argon2-cffi==21.3.0
- argon2-cffi-bindings==21.2.0
- async-generator==1.10
- attrs==22.2.0
- babel==2.11.0
- backcall==0.2.0
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- click==8.0.4
- click-repl==0.3.0
- click-spinner==0.1.10
- colorama==0.4.5
- comm==0.1.4
- contextvars==2.4
- coverage==6.2
- cryptography==40.0.2
- dataclasses==0.8
- decorator==5.1.1
- defusedxml==0.7.1
- docopt==0.6.2
- docutils==0.18.1
- entrypoints==0.4
- execnet==1.9.0
- flake8==3.9.2
- gitdb==4.0.9
- gitpython==3.1.18
- idna==3.10
- imagesize==1.4.1
- immutables==0.19
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipykernel==5.5.6
- ipython==7.16.3
- ipython-genutils==0.2.0
- ipywidgets==7.8.5
- jedi==0.17.2
- jeepney==0.7.1
- jinja2==3.0.3
- json5==0.9.16
- jsonschema==3.2.0
- jupyter==1.1.1
- jupyter-client==7.1.2
- jupyter-console==6.4.3
- jupyter-core==4.9.2
- jupyter-server==1.13.1
- jupyterlab==3.2.9
- jupyterlab-pygments==0.1.2
- jupyterlab-server==2.10.3
- jupyterlab-widgets==1.1.11
- keyring==23.4.1
- markupsafe==2.0.1
- mccabe==0.6.1
- mistune==0.8.4
- mock==5.2.0
- nbclassic==0.3.5
- nbclient==0.5.9
- nbconvert==6.0.7
- nbformat==5.1.3
- nest-asyncio==1.6.0
- notebook==6.4.10
- packaging==21.3
- pandocfilters==1.5.1
- parso==0.7.1
- pbr==6.1.1
- pexpect==4.9.0
- pickleshare==0.7.5
- pkginfo==1.10.0
- pluggy==1.0.0
- progressbar2==3.55.0
- prometheus-client==0.17.1
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pycodestyle==2.7.0
- pycparser==2.21
- pyflakes==2.3.1
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- pytest-mock==3.6.1
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- python-utils==3.5.2
- pytz==2025.2
- pyyaml==6.0.1
- pyzmq==25.1.2
- readme-renderer==34.0
- requests==2.27.1
- requests-mock==1.12.1
- requests-toolbelt==1.0.0
- rfc3986==1.5.0
- secretstorage==3.3.3
- send2trash==1.8.3
- six==1.17.0
- smmap==5.0.0
- sniffio==1.2.0
- snowballstemmer==2.2.0
- sphinx==5.3.0
- sphinx-git==11.0.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- stomp-py==8.1.0
- tabulate==0.8.10
- terminado==0.12.1
- testfixtures==7.2.2
- testpath==0.6.0
- tomli==1.2.3
- tornado==6.1
- tqdm==4.64.1
- traitlets==4.3.3
- twine==3.8.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- wcwidth==0.2.13
- webencodings==0.5.1
- websocket-client==1.3.1
- widgetsnbextension==3.6.10
- zipp==3.6.0
prefix: /opt/conda/envs/python-zhmcclient
| [
"tests/unit/test_session.py::JobTests::test_check_complete_error1",
"tests/unit/test_session.py::JobTests::test_check_complete_error2",
"tests/unit/test_session.py::JobTests::test_check_complete_error3",
"tests/unit/test_session.py::JobTests::test_check_complete_error4",
"tests/unit/test_session.py::JobTests::test_check_complete_success_noresult",
"tests/unit/test_session.py::JobTests::test_check_complete_success_result",
"tests/unit/test_session.py::JobTests::test_check_incomplete",
"tests/unit/test_session.py::JobTests::test_init",
"tests/unit/test_session.py::JobTests::test_wait_complete1_success_result",
"tests/unit/test_session.py::JobTests::test_wait_complete3_success_result",
"tests/unit/test_session.py::JobTests::test_wait_complete3_timeout"
]
| [
"tests/unit/test_session.py::SessionTests::test_get_error_html_1"
]
| [
"tests/unit/test_session.py::SessionTests::test_get_notification_topics",
"tests/unit/test_session.py::SessionTests::test_init",
"tests/unit/test_session.py::SessionTests::test_logon_error_extra_closing",
"tests/unit/test_session.py::SessionTests::test_logon_error_invalid_delim",
"tests/unit/test_session.py::SessionTests::test_logon_error_invalid_quotes",
"tests/unit/test_session.py::SessionTests::test_logon_logoff"
]
| []
| Apache License 2.0 | 1,134 | [
"docs/changes.rst",
"zhmcclient/_session.py"
]
| [
"docs/changes.rst",
"zhmcclient/_session.py"
]
|
dask__dask-2148 | d004c13e775fca0d7bd71448531590bc99f726f9 | 2017-03-31 14:44:06 | bdb021c7dcd94ae1fa51c82fae6cf4cf7319aa14 | diff --git a/dask/array/__init__.py b/dask/array/__init__.py
index 23cd3800a..57a6fd8f0 100644
--- a/dask/array/__init__.py
+++ b/dask/array/__init__.py
@@ -3,7 +3,7 @@ from __future__ import absolute_import, division, print_function
from ..utils import ignoring
from .core import (Array, stack, concatenate, take, tensordot, transpose,
from_array, choose, where, coarsen, insert, broadcast_to, ravel,
- roll, fromfunction, unique, store, squeeze, topk, bincount,
+ roll, fromfunction, unique, store, squeeze, topk, bincount, tile,
digitize, histogram, map_blocks, atop, to_hdf5, dot, cov, array,
dstack, vstack, hstack, to_npy_stack, from_npy_stack, compress,
from_delayed, round, swapaxes, repeat, asarray)
diff --git a/dask/array/core.py b/dask/array/core.py
index 26539919b..b0d9d229f 100644
--- a/dask/array/core.py
+++ b/dask/array/core.py
@@ -2618,6 +2618,8 @@ def elemwise(op, *args, **kwargs):
msg = "%s does not take the following keyword arguments %s"
raise TypeError(msg % (op.__name__, str(sorted(set(kwargs) - set(['name', 'dtype'])))))
+ args = [np.asarray(a) if isinstance(a, (list, tuple)) else a for a in args]
+
shapes = [getattr(arg, 'shape', ()) for arg in args]
shapes = [s if isinstance(s, Iterable) else () for s in shapes]
out_ndim = len(broadcast_shapes(*shapes)) # Raises ValueError if dimensions mismatch
@@ -3797,7 +3799,7 @@ def swapaxes(a, axis1, axis2):
dtype=a.dtype)
-@wraps(np.dot)
+@wraps(np.repeat)
def repeat(a, repeats, axis=None):
if axis is None:
if a.ndim == 1:
@@ -3838,6 +3840,21 @@ def repeat(a, repeats, axis=None):
return concatenate(out, axis=axis)
+@wraps(np.tile)
+def tile(A, reps):
+ if not isinstance(reps, Integral):
+ raise NotImplementedError("Only integer valued `reps` supported.")
+
+ if reps < 0:
+ raise ValueError("Negative `reps` are not allowed.")
+ elif reps == 0:
+ return A[..., :0]
+ elif reps == 1:
+ return A
+
+ return concatenate(reps * [A], axis=-1)
+
+
def slice_with_dask_array(x, index):
y = elemwise(getitem, x, index, dtype=x.dtype)
diff --git a/docs/source/array-api.rst b/docs/source/array-api.rst
index 4750f8b0d..32503530b 100644
--- a/docs/source/array-api.rst
+++ b/docs/source/array-api.rst
@@ -127,6 +127,7 @@ Top level user functions:
tan
tanh
tensordot
+ tile
topk
transpose
tril
| unexpected behavior with da.vstack and broadcasting
I have run into some unexpected behavior with `dask.array.vstack`, which seems like a bug to me.
```python
import dask.array as da
import numpy as np
x = da.arange(12, chunks=100).reshape((4,3))
y = da.vstack([x[:,0], x[:,1], x[:,2]]).T
ans1 = x * [0,0,1]
ans2 = y * [0,0,1]
# this is true
print(np.array_equal(x.compute() * [0,0,1], ans1.compute()))
# this is false
print(np.array_equal(ans1.compute(), ans2.compute()))
# a broadcast appears to have failed; shape is (4,9)
print(ans2.compute().shape)
```
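A workaround that appears to give the expected result is to coerce the list operand to a NumPy array before the element-wise multiply, which is the same `np.asarray` coercion the patch in this record adds inside `elemwise`; the snippet below restates the reproducer with that one change and should be read as a sketch rather than a guarantee:
```python
import dask.array as da
import numpy as np

x = da.arange(12, chunks=100).reshape((4, 3))
y = da.vstack([x[:, 0], x[:, 1], x[:, 2]]).T

# Passing an ndarray instead of a plain list gives elemwise a real shape to
# broadcast against, so the result keeps the expected (4, 3) shape.
ans2_fixed = y * np.array([0, 0, 1])
print(ans2_fixed.compute().shape)
print(np.array_equal((x * [0, 0, 1]).compute(), ans2_fixed.compute()))
```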
I'm using dask v0.14 here. | dask/dask | diff --git a/dask/array/tests/test_array_core.py b/dask/array/tests/test_array_core.py
index 7e4fc14ef..b9b185b49 100644
--- a/dask/array/tests/test_array_core.py
+++ b/dask/array/tests/test_array_core.py
@@ -2589,6 +2589,44 @@ def test_repeat():
assert all(concat(d.repeat(r).chunks))
[email protected]('shape, chunks', [
+ ((10,), (1,)),
+ ((10, 11, 13), (4, 5, 3)),
+])
[email protected]('reps', [0, 1, 2, 3, 5])
+def test_tile(shape, chunks, reps):
+ x = np.random.random(shape)
+ d = da.from_array(x, chunks=chunks)
+
+ assert_eq(np.tile(x, reps), da.tile(d, reps))
+
+
[email protected]('shape, chunks', [
+ ((10,), (1,)),
+ ((10, 11, 13), (4, 5, 3)),
+])
[email protected]('reps', [-1, -5])
+def test_tile_neg_reps(shape, chunks, reps):
+ x = np.random.random(shape)
+ d = da.from_array(x, chunks=chunks)
+
+ with pytest.raises(ValueError):
+ da.tile(d, reps)
+
+
[email protected]('shape, chunks', [
+ ((10,), (1,)),
+ ((10, 11, 13), (4, 5, 3)),
+])
[email protected]('reps', [[1], [1, 2]])
+def test_tile_array_reps(shape, chunks, reps):
+ x = np.random.random(shape)
+ d = da.from_array(x, chunks=chunks)
+
+ with pytest.raises(NotImplementedError):
+ da.tile(d, reps)
+
+
def test_concatenate_stack_dont_warn():
with warnings.catch_warnings(record=True) as record:
da.concatenate([da.ones(2, chunks=1)] * 62)
@@ -2852,3 +2890,20 @@ def test_atop_with_numpy_arrays():
s = da.sum(x)
assert any(x is v for v in s.dask.values())
+
+
[email protected]('chunks', (100, 6))
[email protected]('other', [[0, 0, 1], [2, 1, 3], (0, 0, 1)])
+def test_elemwise_with_lists(chunks, other):
+ x = np.arange(12).reshape((4, 3))
+ d = da.arange(12, chunks=chunks).reshape((4, 3))
+
+ x2 = np.vstack([x[:, 0], x[:, 1], x[:, 2]]).T
+ d2 = da.vstack([d[:, 0], d[:, 1], d[:, 2]]).T
+
+ assert_eq(x2, d2)
+
+ x3 = x2 * other
+ d3 = d2 * other
+
+ assert_eq(x3, d3)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 3
} | 1.16 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[complete]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"flake8",
"pandas_datareader",
"pytest-xdist"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiobotocore==2.1.2
aiohttp==3.8.6
aioitertools==0.11.0
aiosignal==1.2.0
async-timeout==4.0.2
asynctest==0.13.0
attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
botocore==1.23.24
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
cloudpickle==2.2.1
-e git+https://github.com/dask/dask.git@d004c13e775fca0d7bd71448531590bc99f726f9#egg=dask
distributed==1.19.3
execnet==1.9.0
flake8==5.0.4
frozenlist==1.2.0
fsspec==2022.1.0
HeapDict==1.0.1
idna==3.10
idna-ssl==1.1.0
importlib-metadata==4.2.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jmespath==0.10.0
locket==1.0.0
lxml==5.3.1
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
msgpack-python==0.5.6
multidict==5.2.0
numpy==1.19.5
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pandas==1.1.5
pandas-datareader==0.10.0
partd==1.2.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
psutil==7.0.0
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytest-xdist==3.0.2
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
s3fs==2022.1.0
six==1.17.0
sortedcontainers==2.4.0
tblib==1.7.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
toolz==0.12.0
tornado==6.1
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
wrapt==1.16.0
yarl==1.7.2
zict==2.1.0
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: dask
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- aiobotocore==2.1.2
- aiohttp==3.8.6
- aioitertools==0.11.0
- aiosignal==1.2.0
- async-timeout==4.0.2
- asynctest==0.13.0
- botocore==1.23.24
- charset-normalizer==2.0.12
- click==8.0.4
- cloudpickle==2.2.1
- distributed==1.19.3
- execnet==1.9.0
- flake8==5.0.4
- frozenlist==1.2.0
- fsspec==2022.1.0
- heapdict==1.0.1
- idna==3.10
- idna-ssl==1.1.0
- importlib-metadata==4.2.0
- jmespath==0.10.0
- locket==1.0.0
- lxml==5.3.1
- mccabe==0.7.0
- msgpack-python==0.5.6
- multidict==5.2.0
- numpy==1.19.5
- pandas==1.1.5
- pandas-datareader==0.10.0
- partd==1.2.0
- psutil==7.0.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest-xdist==3.0.2
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- s3fs==2022.1.0
- six==1.17.0
- sortedcontainers==2.4.0
- tblib==1.7.0
- toolz==0.12.0
- tornado==6.1
- urllib3==1.26.20
- wrapt==1.16.0
- yarl==1.7.2
- zict==2.1.0
prefix: /opt/conda/envs/dask
| [
"dask/array/tests/test_array_core.py::test_tile[0-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[0-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[2-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[2-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[3-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[3-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile[5-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile[5-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-5-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_neg_reps[-5-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps0-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps0-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps1-shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_tile_array_reps[reps1-shape1-chunks1]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other0-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other1-6]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-100]",
"dask/array/tests/test_array_core.py::test_elemwise_with_lists[other2-6]"
]
| [
"dask/array/tests/test_array_core.py::test_field_access",
"dask/array/tests/test_array_core.py::test_field_access_with_shape",
"dask/array/tests/test_array_core.py::test_setitem_mixed_d"
]
| [
"dask/array/tests/test_array_core.py::test_getem",
"dask/array/tests/test_array_core.py::test_top",
"dask/array/tests/test_array_core.py::test_top_supports_broadcasting_rules",
"dask/array/tests/test_array_core.py::test_concatenate3_on_scalars",
"dask/array/tests/test_array_core.py::test_chunked_dot_product",
"dask/array/tests/test_array_core.py::test_chunked_transpose_plus_one",
"dask/array/tests/test_array_core.py::test_transpose",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions_works_with_singleton_dimensions",
"dask/array/tests/test_array_core.py::test_broadcast_dimensions",
"dask/array/tests/test_array_core.py::test_Array",
"dask/array/tests/test_array_core.py::test_uneven_chunks",
"dask/array/tests/test_array_core.py::test_numblocks_suppoorts_singleton_block_dims",
"dask/array/tests/test_array_core.py::test_keys",
"dask/array/tests/test_array_core.py::test_Array_computation",
"dask/array/tests/test_array_core.py::test_stack",
"dask/array/tests/test_array_core.py::test_short_stack",
"dask/array/tests/test_array_core.py::test_stack_scalars",
"dask/array/tests/test_array_core.py::test_stack_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate",
"dask/array/tests/test_array_core.py::test_concatenate_rechunk",
"dask/array/tests/test_array_core.py::test_concatenate_fixlen_strings",
"dask/array/tests/test_array_core.py::test_vstack",
"dask/array/tests/test_array_core.py::test_hstack",
"dask/array/tests/test_array_core.py::test_dstack",
"dask/array/tests/test_array_core.py::test_take",
"dask/array/tests/test_array_core.py::test_compress",
"dask/array/tests/test_array_core.py::test_binops",
"dask/array/tests/test_array_core.py::test_isnull",
"dask/array/tests/test_array_core.py::test_isclose",
"dask/array/tests/test_array_core.py::test_broadcast_shapes",
"dask/array/tests/test_array_core.py::test_elemwise_on_scalars",
"dask/array/tests/test_array_core.py::test_partial_by_order",
"dask/array/tests/test_array_core.py::test_elemwise_with_ndarrays",
"dask/array/tests/test_array_core.py::test_elemwise_differently_chunked",
"dask/array/tests/test_array_core.py::test_operators",
"dask/array/tests/test_array_core.py::test_operator_dtype_promotion",
"dask/array/tests/test_array_core.py::test_tensordot",
"dask/array/tests/test_array_core.py::test_dot_method",
"dask/array/tests/test_array_core.py::test_T",
"dask/array/tests/test_array_core.py::test_norm",
"dask/array/tests/test_array_core.py::test_choose",
"dask/array/tests/test_array_core.py::test_where",
"dask/array/tests/test_array_core.py::test_where_has_informative_error",
"dask/array/tests/test_array_core.py::test_coarsen",
"dask/array/tests/test_array_core.py::test_coarsen_with_excess",
"dask/array/tests/test_array_core.py::test_insert",
"dask/array/tests/test_array_core.py::test_multi_insert",
"dask/array/tests/test_array_core.py::test_broadcast_to",
"dask/array/tests/test_array_core.py::test_ravel",
"dask/array/tests/test_array_core.py::test_roll[None-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[None-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[None-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[0-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[0-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[1-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[1-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[-1-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis4-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-7-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-7-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-9-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-9-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift3-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift3-chunks1]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift4-chunks0]",
"dask/array/tests/test_array_core.py::test_roll[axis5-shift4-chunks1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape0-new_shape0-chunks0]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape1-new_shape1-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape2-new_shape2-5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape3-new_shape3-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape4-new_shape4-12]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape5-new_shape5-chunks5]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape6-new_shape6-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape7-new_shape7-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape8-new_shape8-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape9-new_shape9-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape10-new_shape10-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape11-new_shape11-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape12-new_shape12-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape13-new_shape13-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape14-new_shape14-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape15-new_shape15-2]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape16-new_shape16-chunks16]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape17-new_shape17-3]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape18-new_shape18-4]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape19-new_shape19-chunks19]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape20-new_shape20-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape21-new_shape21-1]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape22-new_shape22-24]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape23-new_shape23-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape24-new_shape24-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape25-new_shape25-6]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape26-new_shape26-chunks26]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape27-new_shape27-chunks27]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape28-new_shape28-chunks28]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape29-new_shape29-chunks29]",
"dask/array/tests/test_array_core.py::test_reshape[original_shape30-new_shape30-chunks30]",
"dask/array/tests/test_array_core.py::test_reshape_exceptions",
"dask/array/tests/test_array_core.py::test_reshape_splat",
"dask/array/tests/test_array_core.py::test_reshape_fails_for_dask_only",
"dask/array/tests/test_array_core.py::test_reshape_unknown_dimensions",
"dask/array/tests/test_array_core.py::test_full",
"dask/array/tests/test_array_core.py::test_map_blocks",
"dask/array/tests/test_array_core.py::test_map_blocks2",
"dask/array/tests/test_array_core.py::test_map_blocks_with_constants",
"dask/array/tests/test_array_core.py::test_map_blocks_with_kwargs",
"dask/array/tests/test_array_core.py::test_map_blocks_with_chunks",
"dask/array/tests/test_array_core.py::test_map_blocks_dtype_inference",
"dask/array/tests/test_array_core.py::test_fromfunction",
"dask/array/tests/test_array_core.py::test_from_function_requires_block_args",
"dask/array/tests/test_array_core.py::test_repr",
"dask/array/tests/test_array_core.py::test_slicing_with_ellipsis",
"dask/array/tests/test_array_core.py::test_slicing_with_ndarray",
"dask/array/tests/test_array_core.py::test_dtype",
"dask/array/tests/test_array_core.py::test_blockdims_from_blockshape",
"dask/array/tests/test_array_core.py::test_coerce",
"dask/array/tests/test_array_core.py::test_store",
"dask/array/tests/test_array_core.py::test_store_regions",
"dask/array/tests/test_array_core.py::test_store_compute_false",
"dask/array/tests/test_array_core.py::test_store_locks",
"dask/array/tests/test_array_core.py::test_np_array_with_zero_dimensions",
"dask/array/tests/test_array_core.py::test_unique",
"dask/array/tests/test_array_core.py::test_dtype_complex",
"dask/array/tests/test_array_core.py::test_astype",
"dask/array/tests/test_array_core.py::test_arithmetic",
"dask/array/tests/test_array_core.py::test_elemwise_consistent_names",
"dask/array/tests/test_array_core.py::test_optimize",
"dask/array/tests/test_array_core.py::test_slicing_with_non_ndarrays",
"dask/array/tests/test_array_core.py::test_getarray",
"dask/array/tests/test_array_core.py::test_squeeze",
"dask/array/tests/test_array_core.py::test_size",
"dask/array/tests/test_array_core.py::test_nbytes",
"dask/array/tests/test_array_core.py::test_itemsize",
"dask/array/tests/test_array_core.py::test_Array_normalizes_dtype",
"dask/array/tests/test_array_core.py::test_args",
"dask/array/tests/test_array_core.py::test_from_array_with_lock",
"dask/array/tests/test_array_core.py::test_from_array_slicing_results_in_ndarray",
"dask/array/tests/test_array_core.py::test_asarray",
"dask/array/tests/test_array_core.py::test_from_func",
"dask/array/tests/test_array_core.py::test_topk",
"dask/array/tests/test_array_core.py::test_topk_k_bigger_than_chunk",
"dask/array/tests/test_array_core.py::test_bincount",
"dask/array/tests/test_array_core.py::test_bincount_with_weights",
"dask/array/tests/test_array_core.py::test_bincount_raises_informative_error_on_missing_minlength_kwarg",
"dask/array/tests/test_array_core.py::test_digitize",
"dask/array/tests/test_array_core.py::test_histogram",
"dask/array/tests/test_array_core.py::test_histogram_alternative_bins_range",
"dask/array/tests/test_array_core.py::test_histogram_return_type",
"dask/array/tests/test_array_core.py::test_histogram_extra_args_and_shapes",
"dask/array/tests/test_array_core.py::test_concatenate3_2",
"dask/array/tests/test_array_core.py::test_map_blocks3",
"dask/array/tests/test_array_core.py::test_from_array_with_missing_chunks",
"dask/array/tests/test_array_core.py::test_cache",
"dask/array/tests/test_array_core.py::test_take_dask_from_numpy",
"dask/array/tests/test_array_core.py::test_normalize_chunks",
"dask/array/tests/test_array_core.py::test_raise_on_no_chunks",
"dask/array/tests/test_array_core.py::test_chunks_is_immutable",
"dask/array/tests/test_array_core.py::test_raise_on_bad_kwargs",
"dask/array/tests/test_array_core.py::test_long_slice",
"dask/array/tests/test_array_core.py::test_ellipsis_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing",
"dask/array/tests/test_array_core.py::test_point_slicing_with_full_slice",
"dask/array/tests/test_array_core.py::test_slice_with_floats",
"dask/array/tests/test_array_core.py::test_vindex_errors",
"dask/array/tests/test_array_core.py::test_vindex_merge",
"dask/array/tests/test_array_core.py::test_empty_array",
"dask/array/tests/test_array_core.py::test_array",
"dask/array/tests/test_array_core.py::test_cov",
"dask/array/tests/test_array_core.py::test_corrcoef",
"dask/array/tests/test_array_core.py::test_memmap",
"dask/array/tests/test_array_core.py::test_to_npy_stack",
"dask/array/tests/test_array_core.py::test_view",
"dask/array/tests/test_array_core.py::test_view_fortran",
"dask/array/tests/test_array_core.py::test_map_blocks_with_changed_dimension",
"dask/array/tests/test_array_core.py::test_broadcast_chunks",
"dask/array/tests/test_array_core.py::test_chunks_error",
"dask/array/tests/test_array_core.py::test_array_compute_forward_kwargs",
"dask/array/tests/test_array_core.py::test_dont_fuse_outputs",
"dask/array/tests/test_array_core.py::test_dont_dealias_outputs",
"dask/array/tests/test_array_core.py::test_timedelta_op",
"dask/array/tests/test_array_core.py::test_to_delayed",
"dask/array/tests/test_array_core.py::test_cumulative",
"dask/array/tests/test_array_core.py::test_eye",
"dask/array/tests/test_array_core.py::test_diag",
"dask/array/tests/test_array_core.py::test_tril_triu",
"dask/array/tests/test_array_core.py::test_tril_triu_errors",
"dask/array/tests/test_array_core.py::test_atop_names",
"dask/array/tests/test_array_core.py::test_atop_new_axes",
"dask/array/tests/test_array_core.py::test_atop_kwargs",
"dask/array/tests/test_array_core.py::test_atop_chunks",
"dask/array/tests/test_array_core.py::test_from_delayed",
"dask/array/tests/test_array_core.py::test_A_property",
"dask/array/tests/test_array_core.py::test_copy_mutate",
"dask/array/tests/test_array_core.py::test_npartitions",
"dask/array/tests/test_array_core.py::test_astype_gh1151",
"dask/array/tests/test_array_core.py::test_elemwise_name",
"dask/array/tests/test_array_core.py::test_map_blocks_name",
"dask/array/tests/test_array_core.py::test_from_array_names",
"dask/array/tests/test_array_core.py::test_array_picklable",
"dask/array/tests/test_array_core.py::test_swapaxes",
"dask/array/tests/test_array_core.py::test_from_array_raises_on_bad_chunks",
"dask/array/tests/test_array_core.py::test_concatenate_axes",
"dask/array/tests/test_array_core.py::test_atop_concatenate",
"dask/array/tests/test_array_core.py::test_common_blockdim",
"dask/array/tests/test_array_core.py::test_uneven_chunks_that_fit_neatly",
"dask/array/tests/test_array_core.py::test_elemwise_uneven_chunks",
"dask/array/tests/test_array_core.py::test_uneven_chunks_atop",
"dask/array/tests/test_array_core.py::test_warn_bad_rechunking",
"dask/array/tests/test_array_core.py::test_optimize_fuse_keys",
"dask/array/tests/test_array_core.py::test_round",
"dask/array/tests/test_array_core.py::test_repeat",
"dask/array/tests/test_array_core.py::test_concatenate_stack_dont_warn",
"dask/array/tests/test_array_core.py::test_map_blocks_delayed",
"dask/array/tests/test_array_core.py::test_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_2d",
"dask/array/tests/test_array_core.py::test_no_chunks_yes_chunks",
"dask/array/tests/test_array_core.py::test_raise_informative_errors_no_chunks",
"dask/array/tests/test_array_core.py::test_no_chunks_slicing_2d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_1d",
"dask/array/tests/test_array_core.py::test_index_array_with_array_2d",
"dask/array/tests/test_array_core.py::test_setitem_1d",
"dask/array/tests/test_array_core.py::test_setitem_2d",
"dask/array/tests/test_array_core.py::test_setitem_errs",
"dask/array/tests/test_array_core.py::test_zero_slice_dtypes",
"dask/array/tests/test_array_core.py::test_zero_sized_array_rechunk",
"dask/array/tests/test_array_core.py::test_atop_zero_shape",
"dask/array/tests/test_array_core.py::test_atop_zero_shape_new_axes",
"dask/array/tests/test_array_core.py::test_broadcast_against_zero_shape",
"dask/array/tests/test_array_core.py::test_fast_from_array",
"dask/array/tests/test_array_core.py::test_random_from_array",
"dask/array/tests/test_array_core.py::test_concatenate_errs",
"dask/array/tests/test_array_core.py::test_stack_errs",
"dask/array/tests/test_array_core.py::test_transpose_negative_axes",
"dask/array/tests/test_array_core.py::test_atop_with_numpy_arrays"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,135 | [
"dask/array/core.py",
"dask/array/__init__.py",
"docs/source/array-api.rst"
]
| [
"dask/array/core.py",
"dask/array/__init__.py",
"docs/source/array-api.rst"
]
|
|
kytos__python-openflow-343 | e221a33f932eb6aa7ceeb45d6bc8130baac0b7f7 | 2017-03-31 20:31:56 | f889596c6b8acd560a8f4177c672815c245efde5 | diff --git a/pyof/v0x01/common/queue.py b/pyof/v0x01/common/queue.py
index 8bb6fd0..b33d776 100644
--- a/pyof/v0x01/common/queue.py
+++ b/pyof/v0x01/common/queue.py
@@ -49,21 +49,21 @@ class ListOfProperties(FixedTypeList):
class QueuePropHeader(GenericStruct):
"""Describe the header of each queue property."""
- property = UBInt16(enum_ref=QueueProperties)
- len = UBInt16()
+ queue_property = UBInt16(enum_ref=QueueProperties)
+ length = UBInt16()
#: 64-bit alignment
pad = Pad(4)
- def __init__(self, prop=None, length=None):
+ def __init__(self, queue_property=None, length=None):
"""The contructor takes the paremeters below.
Args:
- property (QueueProperties): The queue property.
- len (int): Length of property, including this header.
+ queue_property (QueueProperties): The queue property.
+ length (int): Length of property, including this header.
"""
super().__init__()
- self.property = prop
- self.len = length
+ self.queue_property = queue_property
+ self.length = length
class PacketQueue(GenericStruct):
@@ -93,8 +93,8 @@ class PacketQueue(GenericStruct):
class QueuePropMinRate(GenericStruct):
"""Define the minimum-rate type queue."""
- prop_header = QueuePropHeader(prop=QueueProperties.OFPQT_MIN_RATE,
- length=16)
+ prop_header = QueuePropHeader(
+ queue_property=QueueProperties.OFPQT_MIN_RATE, length=16)
rate = UBInt16()
#: 64-bit alignmet.
pad = Pad(6)
diff --git a/pyof/v0x04/common/queue.py b/pyof/v0x04/common/queue.py
index 56b3f37..c48f592 100644
--- a/pyof/v0x04/common/queue.py
+++ b/pyof/v0x04/common/queue.py
@@ -35,22 +35,22 @@ class QueuePropHeader(GenericStruct):
"""Describe the header of each queue property."""
#: One of OFPQT_*
- property = UBInt16(enum_ref=QueueProperties)
+ queue_property = UBInt16(enum_ref=QueueProperties)
#: Length of property, including this header
length = UBInt16()
#: 64-bit alignment
pad = Pad(4)
# pylint: disable=redefined-builtin
- def __init__(self, property=None, length=None):
+ def __init__(self, queue_property=None, length=None):
"""The contructor takes the paremeters below.
Args:
- property (QueueProperties): The queue property.
- len (int): Length of property, including this header.
+ queue_property (QueueProperties): The queue property.
+ length (int): Length of property, including this header.
"""
super().__init__()
- self.property = property
+ self.queue_property = queue_property
self.length = length
@@ -124,8 +124,8 @@ class ListOfQueues(FixedTypeList):
class QueuePropExperimenter(GenericStruct):
"""Experimenter queue property uses the following structure and fields."""
- prop_header = QueuePropHeader(property=QueueProperties.OFPQT_EXPERIMENTER,
- length=16)
+ prop_header = QueuePropHeader(
+ queue_property=QueueProperties.OFPQT_EXPERIMENTER, length=16)
#: Experimenter ID which takes the same form as in struct
#: ofp_experimenter_header
experimenter = UBInt32()
@@ -150,8 +150,8 @@ class QueuePropExperimenter(GenericStruct):
class QueuePropMaxRate(GenericStruct):
"""Maximum-rate queue property uses the following structure and fields."""
- prop_header = QueuePropHeader(property=QueueProperties.OFPQT_MAX_RATE,
- length=16)
+ prop_header = QueuePropHeader(
+ queue_property=QueueProperties.OFPQT_MAX_RATE, length=16)
#: In 1/10 of a percent; >1000 -> disabled.
rate = UBInt16()
#: 64-bit alignmet.
@@ -170,8 +170,8 @@ class QueuePropMaxRate(GenericStruct):
class QueuePropMinRate(GenericStruct):
"""Minimum-rate queue property uses the following structure and fields."""
- prop_header = QueuePropHeader(property=QueueProperties.OFPQT_MIN_RATE,
- length=16)
+ prop_header = QueuePropHeader(
+ queue_property=QueueProperties.OFPQT_MIN_RATE, length=16)
#: In 1/10 of a percent; >1000 -> disabled.
rate = UBInt16()
#: 64-bit alignmet.
| Attribute with builtin name
In `/v0x04/common/queue.py`, `class QueuePropHeader` has an attribute called `property`, which is a Python builtin name. Is this needed somehow? | kytos/python-openflow | diff --git a/tests/v0x01/test_common/test_queue.py b/tests/v0x01/test_common/test_queue.py
index 89ae5ed..6054e5b 100644
--- a/tests/v0x01/test_common/test_queue.py
+++ b/tests/v0x01/test_common/test_queue.py
@@ -10,8 +10,8 @@ class TestQueuePropHeader(unittest.TestCase):
def setUp(self):
"""Basic setup for test."""
self.message = queue.QueuePropHeader()
- self.message.property = queue.QueueProperties.OFPQT_MIN_RATE
- self.message.len = 12
+ self.message.queue_property = queue.QueueProperties.OFPQT_MIN_RATE
+ self.message.length = 12
def test_get_size(self):
"""[Common/QueuePropHeader] - size 8."""
diff --git a/tests/v0x01/test_controller2switch/test_queue_get_config_reply.py b/tests/v0x01/test_controller2switch/test_queue_get_config_reply.py
index 1d7ad38..46db67f 100644
--- a/tests/v0x01/test_controller2switch/test_queue_get_config_reply.py
+++ b/tests/v0x01/test_controller2switch/test_queue_get_config_reply.py
@@ -32,6 +32,6 @@ def _get_packet_queue():
def _get_queue_properties():
"""Function used to return a list of queue properties."""
properties = []
- properties.append(QueuePropHeader(prop=QueueProperties.OFPQT_MIN_RATE,
- length=12))
+ properties.append(QueuePropHeader(
+ queue_property=QueueProperties.OFPQT_MIN_RATE, length=12))
return properties
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 2
} | 2017.11 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"pytest-xdist",
"pytest-mock",
"pytest-asyncio"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements.txt",
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.16
astroid==3.3.9
babel==2.17.0
certifi==2025.1.31
charset-normalizer==3.4.1
colorama==0.4.6
coverage==7.8.0
dill==0.3.9
docutils==0.21.2
exceptiongroup==1.2.2
execnet==2.1.1
idna==3.10
imagesize==1.4.1
importlib_metadata==8.6.1
iniconfig==2.1.0
isort==6.0.1
Jinja2==3.1.6
mando==0.7.1
MarkupSafe==3.0.2
mccabe==0.7.0
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
pycodestyle==2.13.0
pydocstyle==6.3.0
pyflakes==3.3.2
Pygments==2.19.1
pylama==8.4.1
pylama-pylint==3.1.1
pylint==3.3.6
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
-e git+https://github.com/kytos/python-openflow.git@e221a33f932eb6aa7ceeb45d6bc8130baac0b7f7#egg=python_openflow
radon==6.0.1
requests==2.32.3
six==1.17.0
snowballstemmer==2.2.0
Sphinx==7.4.7
sphinx-bootstrap-theme==0.8.1
sphinxcontrib-applehelp==2.0.0
sphinxcontrib-devhelp==2.0.0
sphinxcontrib-htmlhelp==2.1.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==2.0.0
sphinxcontrib-serializinghtml==2.0.0
tomli==2.2.1
tomlkit==0.13.2
typing_extensions==4.13.0
urllib3==2.3.0
zipp==3.21.0
| name: python-openflow
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.16
- astroid==3.3.9
- babel==2.17.0
- certifi==2025.1.31
- charset-normalizer==3.4.1
- colorama==0.4.6
- coverage==7.8.0
- dill==0.3.9
- docutils==0.21.2
- exceptiongroup==1.2.2
- execnet==2.1.1
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==8.6.1
- iniconfig==2.1.0
- isort==6.0.1
- jinja2==3.1.6
- mando==0.7.1
- markupsafe==3.0.2
- mccabe==0.7.0
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- pycodestyle==2.13.0
- pydocstyle==6.3.0
- pyflakes==3.3.2
- pygments==2.19.1
- pylama==8.4.1
- pylama-pylint==3.1.1
- pylint==3.3.6
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- radon==6.0.1
- requests==2.32.3
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==7.4.7
- sphinx-bootstrap-theme==0.8.1
- sphinxcontrib-applehelp==2.0.0
- sphinxcontrib-devhelp==2.0.0
- sphinxcontrib-htmlhelp==2.1.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==2.0.0
- sphinxcontrib-serializinghtml==2.0.0
- tomli==2.2.1
- tomlkit==0.13.2
- typing-extensions==4.13.0
- urllib3==2.3.0
- zipp==3.21.0
prefix: /opt/conda/envs/python-openflow
| [
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestQueueGetConfigReply::test_minimum_size",
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestQueueGetConfigReply::test_pack",
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestQueueGetConfigReply::test_raw_dump_size",
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestQueueGetConfigReply::test_unpack"
]
| [
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestStruct::test_minimum_size",
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestStruct::test_pack",
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestStruct::test_raw_dump_size",
"tests/v0x01/test_controller2switch/test_queue_get_config_reply.py::TestStruct::test_unpack"
]
| [
"tests/v0x01/test_common/test_queue.py::TestQueuePropHeader::test_get_size",
"tests/v0x01/test_common/test_queue.py::TestPacketQueue::test_get_size",
"tests/v0x01/test_common/test_queue.py::TestQueuePropMinRate::test_get_size"
]
| []
| MIT License | 1,137 | [
"pyof/v0x04/common/queue.py",
"pyof/v0x01/common/queue.py"
]
| [
"pyof/v0x04/common/queue.py",
"pyof/v0x01/common/queue.py"
]
|
|
codecov__codecov-python-94 | 7b292778b8e888ee181c9537cdbfa9eff2acf25c | 2017-03-31 20:44:02 | ba51a78ad0841d26a856641051fd94aed44d4907 | diff --git a/.gitignore b/.gitignore
index b0b83c5..79d2d87 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
*.egg
*.egg-info
*.pyc
+.cache
.coverage
.DS_Store
.tox
diff --git a/codecov/__init__.py b/codecov/__init__.py
index 87cf47f..955adc3 100644
--- a/codecov/__init__.py
+++ b/codecov/__init__.py
@@ -276,10 +276,10 @@ def main(*argv, **kwargs):
if os.getenv('JENKINS_URL'):
# https://wiki.jenkins-ci.org/display/JENKINS/Building+a+software+project
# https://wiki.jenkins-ci.org/display/JENKINS/GitHub+pull+request+builder+plugin#GitHubpullrequestbuilderplugin-EnvironmentVariables
- query.update(dict(branch=os.getenv('ghprbSourceBranch') or os.getenv('GIT_BRANCH'),
+ query.update(dict(branch=os.getenv('ghprbSourceBranch') or os.getenv('GIT_BRANCH') or os.getenv('BRANCH_NAME'),
service='jenkins',
commit=os.getenv('ghprbActualCommit') or os.getenv('GIT_COMMIT'),
- pr=os.getenv('ghprbPullId', 'false'),
+ pr=os.getenv('ghprbPullId') or os.getenv('CHANGE_ID'),
build=os.getenv('BUILD_NUMBER'),
build_url=os.getenv('BUILD_URL')))
root = os.getenv('WORKSPACE') or root
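Blue Ocean and multibranch pipeline builds do not set the classic `GIT_BRANCH`/`ghprb*` variables, so the detection now falls back to `BRANCH_NAME` and `CHANGE_ID`. A stripped-down sketch of that coalescing logic, standalone rather than the full uploader (the variable names mirror the patch; everything else is illustrative):

```python
import os

def jenkins_query():
    """Coalesce classic Jenkins and Blue Ocean / multibranch variables.

    Classic jobs (or the GitHub pull request builder plugin) expose GIT_BRANCH,
    GIT_COMMIT and the ghprb* variables; Blue Ocean / multibranch pipelines
    expose BRANCH_NAME and CHANGE_ID instead, hence the extra fallbacks.
    """
    return dict(
        service='jenkins',
        branch=os.getenv('ghprbSourceBranch') or os.getenv('GIT_BRANCH') or os.getenv('BRANCH_NAME'),
        commit=os.getenv('ghprbActualCommit') or os.getenv('GIT_COMMIT'),
        pr=os.getenv('ghprbPullId') or os.getenv('CHANGE_ID'),
        build=os.getenv('BUILD_NUMBER'),
        build_url=os.getenv('BUILD_URL'),
    )

if __name__ == '__main__':
    # Simulate a Blue Ocean pull-request build.
    os.environ.update(BRANCH_NAME='master', CHANGE_ID='1', BUILD_NUMBER='41')
    print(jenkins_query())  # branch='master', pr='1', build='41'; the rest stay None
```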
| Add support for Jenkins Blue Ocean
See codecov/codecov-node#55 | codecov/codecov-python | diff --git a/tests/test.py b/tests/test.py
index cf54afe..2b7b98d 100644
--- a/tests/test.py
+++ b/tests/test.py
@@ -46,7 +46,7 @@ class TestUploader(unittest.TestCase):
"APPVEYOR_BUILD_VERSION", "APPVEYOR_JOB_ID", "APPVEYOR_REPO_NAME", "APPVEYOR_REPO_COMMIT", "WERCKER_GIT_BRANCH",
"WERCKER_MAIN_PIPELINE_STARTED", "WERCKER_GIT_OWNER", "WERCKER_GIT_REPOSITORY",
"CI_BUILD_REF_NAME", "CI_BUILD_ID", "CI_BUILD_REPO", "CI_PROJECT_DIR", "CI_BUILD_REF", "CI_SERVER_NAME",
- "ghprbActualCommit", "ghprbSourceBranch", "ghprbPullId", "WERCKER_GIT_COMMIT"):
+ "ghprbActualCommit", "ghprbSourceBranch", "ghprbPullId", "WERCKER_GIT_COMMIT", "CHANGE_ID"):
os.environ[key] = ""
def tearDown(self):
@@ -339,6 +339,23 @@ class TestUploader(unittest.TestCase):
self.assertEqual(res['query']['branch'], 'master')
self.assertEqual(res['codecov'].token, 'token')
+ def test_ci_jenkins_blue_ocean(self):
+ self.set_env(JENKINS_URL='https://....',
+ BUILD_URL='https://....',
+ BRANCH_NAME='master',
+ CHANGE_ID='1',
+ BUILD_NUMBER='41',
+ CODECOV_TOKEN='token')
+ self.fake_report()
+ res = self.run_cli()
+ self.assertEqual(res['query']['service'], 'jenkins')
+ self.assertEqual(res['query']['commit'], codecov.check_output(("git", "rev-parse", "HEAD")))
+ self.assertEqual(res['query']['build'], '41')
+ self.assertEqual(res['query']['build_url'], 'https://....')
+ self.assertEqual(res['query']['pr'], '1')
+ self.assertEqual(res['query']['branch'], 'master')
+ self.assertEqual(res['codecov'].token, 'token')
+
def test_ci_travis(self):
self.set_env(TRAVIS="true",
TRAVIS_BRANCH="master",
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 2
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.4",
"reqs_path": [
"tests/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
-e git+https://github.com/codecov/codecov-python.git@7b292778b8e888ee181c9537cdbfa9eff2acf25c#egg=codecov
coverage==6.2
ddt==1.7.2
funcsigs==1.0.2
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
linecache2==1.0.0
mock==5.2.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
requests==2.27.1
six==1.17.0
tomli==1.2.3
traceback2==1.4.0
typing_extensions==4.1.1
unittest2==1.1.0
urllib3==1.26.20
zipp==3.6.0
| name: codecov-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argparse==1.4.0
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- ddt==1.7.2
- funcsigs==1.0.2
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- linecache2==1.0.0
- mock==5.2.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- requests==2.27.1
- six==1.17.0
- tomli==1.2.3
- traceback2==1.4.0
- typing-extensions==4.1.1
- unittest2==1.1.0
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/codecov-python
| [
"tests/test.py::TestUploader::test_ci_jenkins_blue_ocean"
]
| []
| [
"tests/test.py::TestUploader::test_bowerrc",
"tests/test.py::TestUploader::test_bowerrc_none",
"tests/test.py::TestUploader::test_ci_appveyor",
"tests/test.py::TestUploader::test_ci_buildkite",
"tests/test.py::TestUploader::test_ci_circleci",
"tests/test.py::TestUploader::test_ci_codeship",
"tests/test.py::TestUploader::test_ci_drone",
"tests/test.py::TestUploader::test_ci_gitlab",
"tests/test.py::TestUploader::test_ci_jenkins",
"tests/test.py::TestUploader::test_ci_jenkins_env",
"tests/test.py::TestUploader::test_ci_magnum",
"tests/test.py::TestUploader::test_ci_none",
"tests/test.py::TestUploader::test_ci_semaphore",
"tests/test.py::TestUploader::test_ci_shippable",
"tests/test.py::TestUploader::test_ci_snap",
"tests/test.py::TestUploader::test_ci_travis",
"tests/test.py::TestUploader::test_ci_wercker",
"tests/test.py::TestUploader::test_command",
"tests/test.py::TestUploader::test_disable_detect",
"tests/test.py::TestUploader::test_disable_search",
"tests/test.py::TestUploader::test_discovers",
"tests/test.py::TestUploader::test_exits_0",
"tests/test.py::TestUploader::test_exits_1",
"tests/test.py::TestUploader::test_ignore_report_1__coverage_worker10",
"tests/test.py::TestUploader::test_ignore_report_2_coverage_jade",
"tests/test.py::TestUploader::test_ignore_report_3_include_lst",
"tests/test.py::TestUploader::test_ignore_report_4_inputFiles_lst",
"tests/test.py::TestUploader::test_ignore_report_5_createdFiles_lst",
"tests/test.py::TestUploader::test_ignore_report_6_scoverage_measurements_blackandwhite_xml",
"tests/test.py::TestUploader::test_ignore_report_7_test_hello_coverage_txt",
"tests/test.py::TestUploader::test_ignore_report_8_conftest_blackwhite_c_gcov",
"tests/test.py::TestUploader::test_ignored_path_01_vendor",
"tests/test.py::TestUploader::test_ignored_path_02_node_modules",
"tests/test.py::TestUploader::test_ignored_path_03_js_generated_coverage",
"tests/test.py::TestUploader::test_ignored_path_04___pycache__",
"tests/test.py::TestUploader::test_ignored_path_05_coverage_instrumented",
"tests/test.py::TestUploader::test_ignored_path_06_build_lib",
"tests/test.py::TestUploader::test_ignored_path_07_htmlcov",
"tests/test.py::TestUploader::test_ignored_path_08__egg_info",
"tests/test.py::TestUploader::test_ignored_path_09__git",
"tests/test.py::TestUploader::test_ignored_path_10__tox",
"tests/test.py::TestUploader::test_ignored_path_11_venv",
"tests/test.py::TestUploader::test_ignored_path_12__venv_python_2_7",
"tests/test.py::TestUploader::test_include_env",
"tests/test.py::TestUploader::test_is_report_01_coverage_xml",
"tests/test.py::TestUploader::test_is_report_02_jacoco_xml",
"tests/test.py::TestUploader::test_is_report_03_jacocoTestResults_xml",
"tests/test.py::TestUploader::test_is_report_04_coverage_txt",
"tests/test.py::TestUploader::test_is_report_05_gcov_lst",
"tests/test.py::TestUploader::test_is_report_06_cov_gcov",
"tests/test.py::TestUploader::test_is_report_07_info_lcov",
"tests/test.py::TestUploader::test_is_report_08_clover_xml",
"tests/test.py::TestUploader::test_is_report_09_cobertura_xml",
"tests/test.py::TestUploader::test_is_report_10_luacov_report_out",
"tests/test.py::TestUploader::test_is_report_11_gcov_info",
"tests/test.py::TestUploader::test_is_report_12_nosetests_xml",
"tests/test.py::TestUploader::test_none_found",
"tests/test.py::TestUploader::test_not_jacoco",
"tests/test.py::TestUploader::test_read_token_file",
"tests/test.py::TestUploader::test_require_branch_1",
"tests/test.py::TestUploader::test_returns_none",
"tests/test.py::TestUploader::test_run_coverage_fails",
"tests/test.py::TestUploader::test_send",
"tests/test.py::TestUploader::test_send_error"
]
| []
| Apache License 2.0 | 1,138 | [
".gitignore",
"codecov/__init__.py"
]
| [
".gitignore",
"codecov/__init__.py"
]
|
|
Azure__WALinuxAgent-642 | c6704cbad83b93763cf2403a77197559a67b9dca | 2017-03-31 23:16:29 | 6e9b985c1d7d564253a1c344bab01b45093103cd | diff --git a/azurelinuxagent/common/event.py b/azurelinuxagent/common/event.py
index 9265820a..fb2c97c7 100644
--- a/azurelinuxagent/common/event.py
+++ b/azurelinuxagent/common/event.py
@@ -65,8 +65,17 @@ class EventLogger(object):
if not os.path.exists(self.event_dir):
os.mkdir(self.event_dir)
os.chmod(self.event_dir, 0o700)
- if len(os.listdir(self.event_dir)) > 1000:
- raise EventError("Too many files under: {0}".format(self.event_dir))
+
+ existing_events = os.listdir(self.event_dir)
+ if len(existing_events) >= 1000:
+ existing_events.sort()
+ oldest_files = existing_events[:-999]
+ logger.warn("Too many files under: {0}, removing oldest".format(self.event_dir))
+ try:
+ for f in oldest_files:
+ os.remove(os.path.join(self.event_dir, f))
+ except IOError as e:
+ raise EventError(e)
filename = os.path.join(self.event_dir,
ustr(int(time.time() * 1000000)))
diff --git a/azurelinuxagent/ga/exthandlers.py b/azurelinuxagent/ga/exthandlers.py
index 70bb246e..9d3cec3f 100644
--- a/azurelinuxagent/ga/exthandlers.py
+++ b/azurelinuxagent/ga/exthandlers.py
@@ -221,9 +221,9 @@ class ExtHandlersHandler(object):
self.log_etag = True
- state = ext_handler.properties.state
+ state = ext_handler.properties.state.lower()
ext_handler_i.logger.info("Expected handler state: {0}", state)
- if state == "enabled":
+ if state == u"enabled":
self.handle_enable(ext_handler_i)
elif state == u"disabled":
self.handle_disable(ext_handler_i)
@@ -709,7 +709,7 @@ class ExtHandlerInstance(object):
heartbeat = json.loads(heartbeat_json)[0]['heartbeat']
except IOError as e:
raise ExtensionError("Failed to get heartbeat file:{0}".format(e))
- except ValueError as e:
+ except (ValueError, KeyError) as e:
raise ExtensionError("Malformed heartbeat file: {0}".format(e))
return heartbeat
diff --git a/azurelinuxagent/ga/update.py b/azurelinuxagent/ga/update.py
index 53f01736..61d9f470 100644
--- a/azurelinuxagent/ga/update.py
+++ b/azurelinuxagent/ga/update.py
@@ -27,6 +27,7 @@ import signal
import subprocess
import sys
import time
+import traceback
import zipfile
import azurelinuxagent.common.conf as conf
@@ -250,6 +251,7 @@ class UpdateHandler(object):
except Exception as e:
logger.warn(u"Agent {0} failed with exception: {1}", CURRENT_AGENT, ustr(e))
+ logger.warn(traceback.format_exc())
sys.exit(1)
return
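The event-directory change above amounts to trimming to the newest `N - 1` files before writing the Nth. A standalone sketch of that rollover pattern (a hypothetical helper, not the agent's `EventLogger`):

```python
import os

def rollover(event_dir, max_files=1000):
    """Trim the event directory to at most ``max_files - 1`` entries.

    Event filenames are microsecond timestamps, so a plain lexicographic sort
    is also a chronological sort; the oldest files are deleted first, leaving
    room for the event about to be written.
    """
    existing = sorted(os.listdir(event_dir))
    if len(existing) >= max_files:
        for name in existing[:-(max_files - 1)]:
            os.remove(os.path.join(event_dir, name))

# rollover('/var/lib/waagent/events')   # hypothetical path
```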
| Agent crash should log traceback
When the extension handler crashes, we get a generic 'agent crashed' message; we should log a traceback here. | Azure/WALinuxAgent | diff --git a/tests/common/test_event.py b/tests/common/test_event.py
new file mode 100644
index 00000000..726474cd
--- /dev/null
+++ b/tests/common/test_event.py
@@ -0,0 +1,90 @@
+# Copyright 2017 Microsoft Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Requires Python 2.4+ and Openssl 1.0+
+#
+
+from __future__ import print_function
+
+from tests.tools import *
+from azurelinuxagent.common.event import *
+
+
+class TestEvent(AgentTestCase):
+ def test_save_event(self):
+ tmp_evt = tempfile.mkdtemp()
+ init_event_logger(tmp_evt)
+ add_event('test', message='test event')
+ self.assertTrue(len(os.listdir(tmp_evt)) == 1)
+ shutil.rmtree(tmp_evt)
+
+ def test_save_event_rollover(self):
+ tmp_evt = tempfile.mkdtemp()
+ init_event_logger(tmp_evt)
+ add_event('test', message='first event')
+ for i in range(0, 999):
+ add_event('test', message='test event {0}'.format(i))
+
+ events = os.listdir(tmp_evt)
+ events.sort()
+ self.assertTrue(len(events) == 1000)
+
+ first_event = os.path.join(tmp_evt, events[0])
+ with open(first_event) as first_fh:
+ first_event_text = first_fh.read()
+ self.assertTrue('first event' in first_event_text)
+
+ add_event('test', message='last event')
+ events = os.listdir(tmp_evt)
+ events.sort()
+ self.assertTrue(len(events) == 1000, "{0} events found, 1000 expected".format(len(events)))
+
+ first_event = os.path.join(tmp_evt, events[0])
+ with open(first_event) as first_fh:
+ first_event_text = first_fh.read()
+ self.assertFalse('first event' in first_event_text)
+ self.assertTrue('test event 0' in first_event_text)
+
+ last_event = os.path.join(tmp_evt, events[-1])
+ with open(last_event) as last_fh:
+ last_event_text = last_fh.read()
+ self.assertTrue('last event' in last_event_text)
+
+ shutil.rmtree(tmp_evt)
+
+ def test_save_event_cleanup(self):
+ tmp_evt = tempfile.mkdtemp()
+ init_event_logger(tmp_evt)
+
+ for i in range(0, 2000):
+ evt = os.path.join(tmp_evt, '{0}.tld'.format(ustr(1491004920536531 + i)))
+ with open(evt, 'w') as fh:
+ fh.write('test event {0}'.format(i))
+
+ events = os.listdir(tmp_evt)
+ self.assertTrue(len(events) == 2000, "{0} events found, 2000 expected".format(len(events)))
+ add_event('test', message='last event')
+
+ events = os.listdir(tmp_evt)
+ events.sort()
+ self.assertTrue(len(events) == 1000, "{0} events found, 1000 expected".format(len(events)))
+ first_event = os.path.join(tmp_evt, events[0])
+ with open(first_event) as first_fh:
+ first_event_text = first_fh.read()
+ self.assertTrue('test event 1001' in first_event_text)
+
+ last_event = os.path.join(tmp_evt, events[-1])
+ with open(last_event) as last_fh:
+ last_event_text = last_fh.read()
+ self.assertTrue('last event' in last_event_text)
diff --git a/tests/ga/test_update.py b/tests/ga/test_update.py
index 46502cc7..46d06735 100644
--- a/tests/ga/test_update.py
+++ b/tests/ga/test_update.py
@@ -1083,7 +1083,6 @@ class TestUpdate(UpdateTestCase):
self._test_run_latest(mock_child=mock_child, mock_time=mock_time)
self.assertEqual(1, mock_child.poll.call_count)
self.assertEqual(0, mock_child.wait.call_count)
- self.assertEqual(2, mock_time.time_call_count)
return
def test_run_latest_defaults_to_current(self):
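The other half of the patch is purely diagnostic: when the update loop dies, log the full traceback alongside the generic failure message. A minimal sketch of that pattern (a hypothetical wrapper, not the agent's `UpdateHandler.run`):

```python
import logging
import traceback

logger = logging.getLogger("waagent.sketch")

def run_guarded(loop):
    """Run the agent loop; on an unexpected crash, keep the short warning but
    also emit the full traceback so the failure is actually debuggable."""
    try:
        loop()
    except Exception as exc:
        logger.warning("Agent failed with exception: %s", exc)
        logger.warning(traceback.format_exc())
        raise SystemExit(1)
```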
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 3
} | 2.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pyasn1",
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyasn1==0.5.1
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
-e git+https://github.com/Azure/WALinuxAgent.git@c6704cbad83b93763cf2403a77197559a67b9dca#egg=WALinuxAgent
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: WALinuxAgent
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- nose==1.3.7
- pyasn1==0.5.1
prefix: /opt/conda/envs/WALinuxAgent
| [
"tests/common/test_event.py::TestEvent::test_save_event_cleanup",
"tests/common/test_event.py::TestEvent::test_save_event_rollover"
]
| []
| [
"tests/common/test_event.py::TestEvent::test_save_event",
"tests/ga/test_update.py::TestGuestAgentError::test_clear",
"tests/ga/test_update.py::TestGuestAgentError::test_creation",
"tests/ga/test_update.py::TestGuestAgentError::test_load_preserves_error_state",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgentError::test_mark_failure_permanent",
"tests/ga/test_update.py::TestGuestAgentError::test_save",
"tests/ga/test_update.py::TestGuestAgentError::test_str",
"tests/ga/test_update.py::TestGuestAgent::test_clear_error",
"tests/ga/test_update.py::TestGuestAgent::test_creation",
"tests/ga/test_update.py::TestGuestAgent::test_download",
"tests/ga/test_update.py::TestGuestAgent::test_download_fail",
"tests/ga/test_update.py::TestGuestAgent::test_download_fallback",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_download_skips_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_download_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_load_manifest_fails",
"tests/ga/test_update.py::TestGuestAgent::test_ensure_downloaded_unpack_fails",
"tests/ga/test_update.py::TestGuestAgent::test_is_available",
"tests/ga/test_update.py::TestGuestAgent::test_is_blacklisted",
"tests/ga/test_update.py::TestGuestAgent::test_is_downloaded",
"tests/ga/test_update.py::TestGuestAgent::test_load_error",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_empty",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_is_malformed",
"tests/ga/test_update.py::TestGuestAgent::test_load_manifest_missing",
"tests/ga/test_update.py::TestGuestAgent::test_mark_failure",
"tests/ga/test_update.py::TestGuestAgent::test_unpack",
"tests/ga/test_update.py::TestGuestAgent::test_unpack_fail",
"tests/ga/test_update.py::TestUpdate::test_creation",
"tests/ga/test_update.py::TestUpdate::test_emit_restart_event_emits_event_if_not_clean_start",
"tests/ga/test_update.py::TestUpdate::test_emit_restart_event_writes_sentinal_file",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_ignores_exceptions",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_kills_after_interval",
"tests/ga/test_update.py::TestUpdate::test_ensure_no_orphans_skips_if_no_orphans",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_ignores_installed_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_raises_exception_for_restarting_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_resets_with_new_agent",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_for_long_restarts",
"tests/ga/test_update.py::TestUpdate::test_evaluate_agent_health_will_not_raise_exception_too_few_restarts",
"tests/ga/test_update.py::TestUpdate::test_filter_blacklisted_agents",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_excluded",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_no_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skip_updates",
"tests/ga/test_update.py::TestUpdate::test_get_latest_agent_skips_unavailable",
"tests/ga/test_update.py::TestUpdate::test_get_pid_files",
"tests/ga/test_update.py::TestUpdate::test_get_pid_files_returns_previous",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_false_for_current_agent",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_false_for_exceptions",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_true_sentinal_agent_is_not_current",
"tests/ga/test_update.py::TestUpdate::test_is_clean_start_returns_true_when_no_sentinal",
"tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_false_if_parent_exists",
"tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_true_if_parent_does_not_exist",
"tests/ga/test_update.py::TestUpdate::test_is_orphaned_returns_true_if_parent_is_init",
"tests/ga/test_update.py::TestUpdate::test_load_agents",
"tests/ga/test_update.py::TestUpdate::test_load_agents_does_reload",
"tests/ga/test_update.py::TestUpdate::test_load_agents_sorts",
"tests/ga/test_update.py::TestUpdate::test_purge_agents",
"tests/ga/test_update.py::TestUpdate::test_run",
"tests/ga/test_update.py::TestUpdate::test_run_clears_sentinal_on_successful_exit",
"tests/ga/test_update.py::TestUpdate::test_run_emits_restart_event",
"tests/ga/test_update.py::TestUpdate::test_run_keeps_running",
"tests/ga/test_update.py::TestUpdate::test_run_latest",
"tests/ga/test_update.py::TestUpdate::test_run_latest_captures_signals",
"tests/ga/test_update.py::TestUpdate::test_run_latest_creates_only_one_signal_handler",
"tests/ga/test_update.py::TestUpdate::test_run_latest_defaults_to_current",
"tests/ga/test_update.py::TestUpdate::test_run_latest_exception_blacklists",
"tests/ga/test_update.py::TestUpdate::test_run_latest_forwards_output",
"tests/ga/test_update.py::TestUpdate::test_run_latest_nonzero_code_marks_failures",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_failure",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polling_stops_at_success",
"tests/ga/test_update.py::TestUpdate::test_run_latest_polls_and_waits_for_success",
"tests/ga/test_update.py::TestUpdate::test_run_leaves_sentinal_on_unsuccessful_exit",
"tests/ga/test_update.py::TestUpdate::test_run_stops_if_orphaned",
"tests/ga/test_update.py::TestUpdate::test_run_stops_if_update_available",
"tests/ga/test_update.py::TestUpdate::test_set_agents_sets_agents",
"tests/ga/test_update.py::TestUpdate::test_set_agents_sorts_agents",
"tests/ga/test_update.py::TestUpdate::test_set_sentinal",
"tests/ga/test_update.py::TestUpdate::test_set_sentinal_writes_current_agent",
"tests/ga/test_update.py::TestUpdate::test_shutdown",
"tests/ga/test_update.py::TestUpdate::test_shutdown_ignores_exceptions",
"tests/ga/test_update.py::TestUpdate::test_shutdown_ignores_missing_sentinal_file",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_handles_missing_family",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_includes_old_agents",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_purges_old_agents",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_returns_true_on_first_use",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_if_too_frequent",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_if_when_no_new_versions",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_when_no_versions",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_skips_when_updates_are_disabled",
"tests/ga/test_update.py::TestUpdate::test_upgrade_available_sorts",
"tests/ga/test_update.py::TestUpdate::test_write_pid_file",
"tests/ga/test_update.py::TestUpdate::test_write_pid_file_ignores_exceptions"
]
| []
| Apache License 2.0 | 1,139 | [
"azurelinuxagent/common/event.py",
"azurelinuxagent/ga/exthandlers.py",
"azurelinuxagent/ga/update.py"
]
| [
"azurelinuxagent/common/event.py",
"azurelinuxagent/ga/exthandlers.py",
"azurelinuxagent/ga/update.py"
]
|
|
borgbackup__borg-2374 | f878678b0c1df0e4c4971e7ef668e49a889e0545 | 2017-04-01 19:10:49 | a439fa3e720c8bb2a82496768ffcce282fb7f7b7 | codecov-io: # [Codecov](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=h1) Report
> Merging [#2374](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=desc) into [master](https://codecov.io/gh/borgbackup/borg/commit/d79da81d2244c328387cde3bbc1a83e36883a2d6?src=pr&el=desc) will **decrease** coverage by `0.11%`.
> The diff coverage is `100%`.
[](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #2374 +/- ##
==========================================
- Coverage 83.08% 82.96% -0.12%
==========================================
Files 20 20
Lines 7613 7615 +2
Branches 1294 1295 +1
==========================================
- Hits 6325 6318 -7
- Misses 929 936 +7
- Partials 359 361 +2
```
| [Impacted Files](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [src/borg/archiver.py](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=tree#diff-c3JjL2JvcmcvYXJjaGl2ZXIucHk=) | `82.12% <100%> (-0.11%)` | :arrow_down: |
| [src/borg/archive.py](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=tree#diff-c3JjL2JvcmcvYXJjaGl2ZS5weQ==) | `81.86% <0%> (-0.56%)` | :arrow_down: |
| [src/borg/remote.py](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=tree#diff-c3JjL2JvcmcvcmVtb3RlLnB5) | `76.78% <0%> (-0.21%)` | :arrow_down: |
| [src/borg/helpers.py](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=tree#diff-c3JjL2JvcmcvaGVscGVycy5weQ==) | `87.61% <0%> (+0.07%)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=footer). Last update [d79da81...b43abf1](https://codecov.io/gh/borgbackup/borg/pull/2374?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). | diff --git a/src/borg/archiver.py b/src/borg/archiver.py
index 7dccd55a..e73aaf2d 100644
--- a/src/borg/archiver.py
+++ b/src/borg/archiver.py
@@ -4,6 +4,7 @@
import functools
import hashlib
import inspect
+import itertools
import json
import logging
import os
@@ -3306,6 +3307,9 @@ def get_args(self, argv, cmd):
if cmd is not None and result.func == self.do_serve:
forced_result = result
argv = shlex.split(cmd)
+ # Drop environment variables (do *not* interpret them) before trying to parse
+ # the borg command line.
+ argv = list(itertools.dropwhile(lambda arg: '=' in arg, argv))
result = self.parse_args(argv[1:])
if result.func != forced_result.func:
# someone is trying to execute a different borg subcommand, don't do that!
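The fix is a single `dropwhile`: because a forced command bypasses the login shell, borg receives the client's requested command line verbatim (via `SSH_ORIGINAL_COMMAND`), so any leading `KEY=VALUE` tokens must be skipped rather than interpreted before parsing. A small standalone sketch of that behaviour:

```python
import itertools
import shlex

def strip_leading_env(forced_cmd):
    """Drop (do not interpret) leading KEY=VALUE tokens from a command line.

    With a forced command in authorized_keys, no shell ever strips or applies
    these assignments, so the serve-side parser has to skip past them before
    treating the rest as a borg command.
    """
    argv = shlex.split(forced_cmd)
    return list(itertools.dropwhile(lambda arg: '=' in arg, argv))

print(strip_leading_env('BORG_HOSTNAME_IS_UNIQUE=yes borg serve --umask=077 --debug'))
# -> ['borg', 'serve', '--umask=077', '--debug']
```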
| giving env var doesn't work for forced cmd
From IRC / bket:
```
macbook:~/bin$ borg-wrapper list --debug
using builtin fallback logging configuration
35 self tests completed in 0.23 seconds
SSH command line: ['ssh', '-i', '/Users/bket/.snapshot/keys/snapshot_ssh', '-o', 'IdentitiesOnly=yes', '[email protected]', 'BORG_HOSTNAME_IS_UNIQUE=yes', 'borg', 'serve', '--umask=077', '--debug']
Remote: usage: borg [-h] [-V] <command> ...
Remote: borg: error: argument <command>: invalid choice: 'borg' (choose from 'serve', 'init', 'check', 'change-passphrase', 'key', 'migrate-to-repokey', 'create', 'extract', 'rename', 'delete', 'list', 'mount', 'umount', 'info', 'break-lock', 'prune', 'upgrade', 'help', 'debug', 'debug-info', 'debug-dump-archive-items', 'debug-dump-repo-objs', 'debug-get-obj', 'debug-put-obj', 'debug-delete-obj', 'debug-refcount-obj')
Connection closed by remote host. Is borg working on the server?
Traceback (most recent call last):
File "borg/archiver.py", line 3476, in main
File "borg/archiver.py", line 3404, in run
File "borg/archiver.py", line 102, in wrapper
File "borg/remote.py", line 553, in __init__
borg.remote.ConnectionClosedWithHint: Connection closed by remote host. Is borg working on the server?
Platform: Darwin macbook 16.4.0 Darwin Kernel Version 16.4.0: Thu Dec 22 22:53:21 PST 2016; root:xnu-3789.41.3~3/RELEASE_X86_64 x86_64 i386
Borg: 1.1.0b4 Python: CPython 3.5.3
PID: 3279 CWD: /Users/bket/bin
sys.argv: ['borg', 'list', '--debug']
SSH_ORIGINAL_COMMAND: None
```
---
:moneybag: [there is a bounty for this](https://www.bountysource.com/issues/43561139-giving-env-var-doesn-t-work-for-forced-cmd)
| borgbackup/borg | diff --git a/src/borg/testsuite/archiver.py b/src/borg/testsuite/archiver.py
index 287dfe2c..f2608f13 100644
--- a/src/borg/testsuite/archiver.py
+++ b/src/borg/testsuite/archiver.py
@@ -2841,6 +2841,13 @@ def test_get_args():
'borg init --encryption=repokey /')
assert args.func == archiver.do_serve
+ # Check that environment variables in the forced command don't cause issues. If the command
+ # were not forced, environment variables would be interpreted by the shell, but this does not
+ # happen for forced commands - we get the verbatim command line and need to deal with env vars.
+ args = archiver.get_args(['borg', 'serve', ],
+ 'BORG_HOSTNAME_IS_UNIQUE=yes borg serve --info')
+ assert args.func == archiver.do_serve
+
def test_compare_chunk_contents():
def ccc(a, b):
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 0
},
"num_modified_files": 1
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest pytest-cov pytest-xdist pytest-mock pytest-asyncio",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libssl-dev libacl1-dev liblz4-dev libzstd-dev pkg-config build-essential"
],
"python": "3.9",
"reqs_path": [
"requirements.d/development.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/borgbackup/borg.git@f878678b0c1df0e4c4971e7ef668e49a889e0545#egg=borgbackup
cachetools==5.5.2
chardet==5.2.0
colorama==0.4.6
coverage==7.8.0
Cython==3.0.12
distlib==0.3.9
exceptiongroup==1.2.2
execnet==2.1.1
filelock==3.18.0
iniconfig==2.1.0
msgpack-python==0.5.6
packaging==24.2
platformdirs==4.3.7
pluggy==1.5.0
py-cpuinfo==9.0.0
pyproject-api==1.9.0
pytest==8.3.5
pytest-asyncio==0.26.0
pytest-benchmark==5.1.0
pytest-cov==6.0.0
pytest-mock==3.14.0
pytest-xdist==3.6.1
setuptools-scm==8.2.0
tomli==2.2.1
tox==4.25.0
typing_extensions==4.13.0
virtualenv==20.29.3
| name: borg
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- cachetools==5.5.2
- chardet==5.2.0
- colorama==0.4.6
- coverage==7.8.0
- cython==3.0.12
- distlib==0.3.9
- exceptiongroup==1.2.2
- execnet==2.1.1
- filelock==3.18.0
- iniconfig==2.1.0
- msgpack-python==0.5.6
- packaging==24.2
- platformdirs==4.3.7
- pluggy==1.5.0
- py-cpuinfo==9.0.0
- pyproject-api==1.9.0
- pytest==8.3.5
- pytest-asyncio==0.26.0
- pytest-benchmark==5.1.0
- pytest-cov==6.0.0
- pytest-mock==3.14.0
- pytest-xdist==3.6.1
- setuptools-scm==8.2.0
- tomli==2.2.1
- tox==4.25.0
- typing-extensions==4.13.0
- virtualenv==20.29.3
prefix: /opt/conda/envs/borg
| [
"src/borg/testsuite/archiver.py::test_get_args"
]
| [
"src/borg/testsuite/archiver.py::test_return_codes[python]",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_aes_counter_uniqueness_keyfile",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_aes_counter_uniqueness_passphrase",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_atime",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_bad_filters",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_basic_functionality",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_break_lock",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_change_passphrase",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_check_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_comment",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_auto_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_auto_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lz4_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lzma_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_none_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_none_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_zlib_compressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_corrupted_repository",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_dry_run",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_file",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_pattern_root",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_read_special_broken_symlink",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_topical",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_create_without_root",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_archive",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_archive_items",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_manifest",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_dump_repo_objs",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_debug_put_get_delete_obj",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete_repo",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_caches",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_keep_tagged_deprecation",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_normalization",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_exclude_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_hardlinks",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_include_exclude",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_include_exclude_regex",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_include_exclude_regex_from_file",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_list_output",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_pattern_opt",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_progress",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_extract_with_pattern",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_file_status",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_file_status_excluded",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_info",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_info_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_keyfile",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_paperkey",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_qr",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_export_repokey",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_key_import_errors",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_chunk_counts",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_format",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_hash",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_json",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_prefix",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_repository_format",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_list_size",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_overwrite",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_path_normalization",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_progress_off",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_progress_on",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository_prefix",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_prune_repository_save_space",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_basic",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_dry_run",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_exclude_caches",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_exclude_tagged",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_list_output",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_rechunkify",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_recompress",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_skips_nothing_to_do",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_subtree_hardlinks",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_target",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_recreate_target_rc",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_rename",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repeated_files",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_move",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection2",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection2_no_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_repository_swap_detection_no_cache",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_security_dir_compat",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_sparse_file",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_strip_components",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_strip_components_links",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_symlink_extract",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_umask",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unix_socket",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unusual_filenames",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_with_lock",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_attic013_acl_bug",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_check_usage",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_corrupted_manifest",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_empty_repository",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_extra_chunks",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_manifest_rebuild_corrupted_chunk",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_manifest_rebuild_duplicate_archive",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_archive_item_chunk",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_archive_metadata",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_file_chunk",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_missing_manifest",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_verify_data",
"src/borg/testsuite/archiver.py::ArchiverCheckTestCase::test_verify_data_unencrypted",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_disable",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_disable2",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_fresh_init_tam_required",
"src/borg/testsuite/archiver.py::ManifestAuthenticationTest::test_not_required",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_aes_counter_uniqueness_keyfile",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_aes_counter_uniqueness_passphrase",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_atime",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_bad_filters",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_basic_functionality",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_break_lock",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_change_passphrase",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_check_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_comment",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_auto_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_auto_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lz4_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lz4_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lzma_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_lzma_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_none_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_none_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_zlib_compressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_compression_zlib_uncompressible",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_corrupted_repository",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_dry_run",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_file",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_pattern_root",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_read_special_broken_symlink",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_topical",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_create_without_root",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_archive",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_archive_items",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_manifest",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_debug_dump_repo_objs",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete_force",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete_repo",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_caches",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_keep_tagged_deprecation",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_normalization",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_exclude_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_hardlinks",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_include_exclude",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_include_exclude_regex",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_include_exclude_regex_from_file",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_list_output",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_pattern_opt",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_progress",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_extract_with_pattern",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_file_status",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_file_status_excluded",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_info",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_info_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_keyfile",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_paperkey",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_qr",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_export_repokey",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_key_import_errors",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_chunk_counts",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_format",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_hash",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_json",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_prefix",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_repository_format",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_list_size",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_overwrite",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_path_normalization",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_progress_off",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_progress_on",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository_prefix",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_prune_repository_save_space",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_basic",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_dry_run",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_exclude_caches",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_exclude_keep_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_exclude_tagged",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_list_output",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_rechunkify",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_recompress",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_skips_nothing_to_do",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_subtree_hardlinks",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_target",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_recreate_target_rc",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_remote_repo_restrict_to_path",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_rename",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repeated_files",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_move",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection2",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection2_no_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_repository_swap_detection_no_cache",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_security_dir_compat",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_sparse_file",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_strip_components",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_strip_components_doesnt_leak",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_strip_components_links",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_symlink_extract",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_umask",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unix_socket",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unusual_filenames",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_with_lock",
"src/borg/testsuite/archiver.py::DiffArchiverTestCase::test_basic_functionality",
"src/borg/testsuite/archiver.py::DiffArchiverTestCase::test_sort_option"
]
| [
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lz4_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_lzma_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_compression_zlib_uncompressible",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete_double_force",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_delete_force",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_help",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_init_interrupt",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_init_requires_encryption_option",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_unknown_unencrypted",
"src/borg/testsuite/archiver.py::ArchiverTestCase::test_usage",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_delete_double_force",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_help",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_init_interrupt",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_init_requires_encryption_option",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_unknown_unencrypted",
"src/borg/testsuite/archiver.py::RemoteArchiverTestCase::test_usage",
"src/borg/testsuite/archiver.py::test_compare_chunk_contents",
"src/borg/testsuite/archiver.py::TestBuildFilter::test_basic",
"src/borg/testsuite/archiver.py::TestBuildFilter::test_empty",
"src/borg/testsuite/archiver.py::TestBuildFilter::test_strip_components"
]
| []
| BSD License | 1,141 | [
"src/borg/archiver.py"
]
| [
"src/borg/archiver.py"
]
|
bmcfee__pumpp-57 | 7486f998acdd0f82241aeb0efdd6912da758c961 | 2017-04-03 16:56:42 | 797a732eb907bd816b75b10da4707517789e871d | diff --git a/pumpp/core.py b/pumpp/core.py
index cf39549..ab389c4 100644
--- a/pumpp/core.py
+++ b/pumpp/core.py
@@ -150,7 +150,7 @@ class Pump(object):
return transform(audio_f, jam, *self.ops)
- def sampler(self, n_samples, duration):
+ def sampler(self, n_samples, duration, random_state=None):
'''Construct a sampler object for this pump's operators.
Parameters
@@ -161,6 +161,16 @@ class Pump(object):
duration : int > 0
The duration (in frames) of each sample patch
+ random_state : None, int, or np.random.RandomState
+ If int, random_state is the seed used by the random number
+ generator;
+
+ If RandomState instance, random_state is the random number
+ generator;
+
+ If None, the random number generator is the RandomState instance
+ used by np.random.
+
Returns
-------
sampler : pumpp.Sampler
@@ -171,7 +181,9 @@ class Pump(object):
pumpp.sampler.Sampler
'''
- return Sampler(n_samples, duration, *self.ops)
+ return Sampler(n_samples, duration,
+ random_state=random_state,
+ *self.ops)
@property
def fields(self):
diff --git a/pumpp/version.py b/pumpp/version.py
index 20f3f16..05cc0e1 100644
--- a/pumpp/version.py
+++ b/pumpp/version.py
@@ -3,4 +3,4 @@
"""Version info"""
short_version = '0.1'
-version = '0.1.3'
+version = '0.1.4pre'
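To make the extended interface above concrete, here is a minimal usage sketch. The operator parameters, seed value, and patch sizes are illustrative placeholders; only the `Pump.sampler` signature and the `random_state` semantics are taken from the patch and its docstring.

```python
import numpy as np
import pumpp

# Build a pump around a single feature operator (parameters are placeholders).
ops = [pumpp.feature.STFT(name='stft', sr=22050, hop_length=512, n_fft=1024)]
P = pumpp.Pump(*ops)

# Seed by int: the sampler gets its own seeded random number generator.
S_seeded = P.sampler(n_samples=8, duration=100, random_state=20170403)

# Or hand in an existing RandomState instance directly.
S_shared = P.sampler(n_samples=8, duration=100,
                     random_state=np.random.RandomState(20170403))

# random_state=None keeps the previous behavior (np.random's global state).
S_default = P.sampler(n_samples=8, duration=100)
```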
| extend pump.sampler interface
Or at least expose all the args and kwargs | bmcfee/pumpp | diff --git a/tests/test_core.py b/tests/test_core.py
index c74ac7b..4feb22f 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -166,7 +166,8 @@ def test_pump_badkey(sr, hop_length):
@pytest.mark.parametrize('n_samples', [None, 10])
@pytest.mark.parametrize('duration', [1, 5])
-def test_pump_sampler(sr, hop_length, n_samples, duration):
+@pytest.mark.parametrize('rng', [None, 1])
+def test_pump_sampler(sr, hop_length, n_samples, duration, rng):
ops = [pumpp.feature.STFT(name='stft', sr=sr,
hop_length=hop_length,
n_fft=2*hop_length),
@@ -176,8 +177,8 @@ def test_pump_sampler(sr, hop_length, n_samples, duration):
P = pumpp.Pump(*ops)
- S1 = pumpp.Sampler(n_samples, duration, *ops)
- S2 = P.sampler(n_samples, duration)
+ S1 = pumpp.Sampler(n_samples, duration, random_state=rng, *ops)
+ S2 = P.sampler(n_samples, duration, random_state=rng)
assert S1._time == S2._time
assert S1.n_samples == S2.n_samples
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 2
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[docs,tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y ffmpeg"
],
"python": "3.5",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | absl-py==0.15.0
alabaster==0.7.13
appdirs==1.4.4
astunparse==1.6.3
attrs==22.2.0
audioread==3.0.1
Babel==2.11.0
cached-property==1.5.2
cachetools==4.2.4
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
clang==5.0
coverage==6.2
dataclasses==0.8
decorator==5.1.1
docutils==0.18.1
flatbuffers==1.12
gast==0.4.0
google-auth==1.35.0
google-auth-oauthlib==0.4.6
google-pasta==0.2.0
grpcio==1.48.2
h5py==3.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
jams==0.3.4
Jinja2==3.0.3
joblib==1.1.1
jsonschema==3.2.0
keras==2.6.0
Keras-Preprocessing==1.1.2
librosa==0.9.2
llvmlite==0.36.0
Markdown==3.3.7
MarkupSafe==2.0.1
mir_eval==0.8.2
numba==0.53.1
numpy==1.19.5
numpydoc==1.1.0
oauthlib==3.2.2
opt-einsum==3.3.0
packaging==21.3
pandas==1.1.5
pluggy==1.0.0
pooch==1.6.0
protobuf==3.19.6
-e git+https://github.com/bmcfee/pumpp.git@7486f998acdd0f82241aeb0efdd6912da758c961#egg=pumpp
py==1.11.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
requests-oauthlib==2.0.0
resampy==0.4.3
rsa==4.9
scikit-learn==0.24.2
scipy==1.5.4
six==1.15.0
snowballstemmer==2.2.0
sortedcontainers==2.4.0
soundfile==0.13.1
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tensorboard==2.6.0
tensorboard-data-server==0.6.1
tensorboard-plugin-wit==1.8.1
tensorflow==2.6.2
tensorflow-estimator==2.6.0
termcolor==1.1.0
threadpoolctl==3.1.0
tomli==1.2.3
typing-extensions==3.7.4.3
urllib3==1.26.20
Werkzeug==2.0.3
wrapt==1.12.1
zipp==3.6.0
| name: pumpp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- absl-py==0.15.0
- alabaster==0.7.13
- appdirs==1.4.4
- astunparse==1.6.3
- attrs==22.2.0
- audioread==3.0.1
- babel==2.11.0
- cached-property==1.5.2
- cachetools==4.2.4
- cffi==1.15.1
- charset-normalizer==2.0.12
- clang==5.0
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- docutils==0.18.1
- flatbuffers==1.12
- gast==0.4.0
- google-auth==1.35.0
- google-auth-oauthlib==0.4.6
- google-pasta==0.2.0
- grpcio==1.48.2
- h5py==3.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jams==0.3.4
- jinja2==3.0.3
- joblib==1.1.1
- jsonschema==3.2.0
- keras==2.6.0
- keras-preprocessing==1.1.2
- librosa==0.9.2
- llvmlite==0.36.0
- markdown==3.3.7
- markupsafe==2.0.1
- mir-eval==0.8.2
- numba==0.53.1
- numpy==1.19.5
- numpydoc==1.1.0
- oauthlib==3.2.2
- opt-einsum==3.3.0
- packaging==21.3
- pandas==1.1.5
- pluggy==1.0.0
- pooch==1.6.0
- protobuf==3.19.6
- py==1.11.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- requests-oauthlib==2.0.0
- resampy==0.4.3
- rsa==4.9
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.15.0
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- soundfile==0.13.1
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tensorboard==2.6.0
- tensorboard-data-server==0.6.1
- tensorboard-plugin-wit==1.8.1
- tensorflow==2.6.2
- tensorflow-estimator==2.6.0
- termcolor==1.1.0
- threadpoolctl==3.1.0
- tomli==1.2.3
- typing-extensions==3.7.4.3
- urllib3==1.26.20
- werkzeug==2.0.3
- wrapt==1.12.1
- zipp==3.6.0
prefix: /opt/conda/envs/pumpp
| [
"tests/test_core.py::test_pump_sampler[11025-128-None-1-None]",
"tests/test_core.py::test_pump_sampler[11025-128-None-1-10]",
"tests/test_core.py::test_pump_sampler[11025-128-None-5-None]",
"tests/test_core.py::test_pump_sampler[11025-128-None-5-10]",
"tests/test_core.py::test_pump_sampler[11025-128-1-1-None]",
"tests/test_core.py::test_pump_sampler[11025-128-1-1-10]",
"tests/test_core.py::test_pump_sampler[11025-128-1-5-None]",
"tests/test_core.py::test_pump_sampler[11025-128-1-5-10]",
"tests/test_core.py::test_pump_sampler[11025-512-None-1-None]",
"tests/test_core.py::test_pump_sampler[11025-512-None-1-10]",
"tests/test_core.py::test_pump_sampler[11025-512-None-5-None]",
"tests/test_core.py::test_pump_sampler[11025-512-None-5-10]",
"tests/test_core.py::test_pump_sampler[11025-512-1-1-None]",
"tests/test_core.py::test_pump_sampler[11025-512-1-1-10]",
"tests/test_core.py::test_pump_sampler[11025-512-1-5-None]",
"tests/test_core.py::test_pump_sampler[11025-512-1-5-10]",
"tests/test_core.py::test_pump_sampler[22050-128-None-1-None]",
"tests/test_core.py::test_pump_sampler[22050-128-None-1-10]",
"tests/test_core.py::test_pump_sampler[22050-128-None-5-None]",
"tests/test_core.py::test_pump_sampler[22050-128-None-5-10]",
"tests/test_core.py::test_pump_sampler[22050-128-1-1-None]",
"tests/test_core.py::test_pump_sampler[22050-128-1-1-10]",
"tests/test_core.py::test_pump_sampler[22050-128-1-5-None]",
"tests/test_core.py::test_pump_sampler[22050-128-1-5-10]",
"tests/test_core.py::test_pump_sampler[22050-512-None-1-None]",
"tests/test_core.py::test_pump_sampler[22050-512-None-1-10]",
"tests/test_core.py::test_pump_sampler[22050-512-None-5-None]",
"tests/test_core.py::test_pump_sampler[22050-512-None-5-10]",
"tests/test_core.py::test_pump_sampler[22050-512-1-1-None]",
"tests/test_core.py::test_pump_sampler[22050-512-1-1-10]",
"tests/test_core.py::test_pump_sampler[22050-512-1-5-None]",
"tests/test_core.py::test_pump_sampler[22050-512-1-5-10]"
]
| [
"tests/test_core.py::test_transform[None-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[None-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[None-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[None-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[tests/data/test.jams-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_transform[jam2-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[None-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[tests/data/test.jams-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump[jam2-22050-512-tests/data/test.ogg]"
]
| [
"tests/test_core.py::test_pump_empty[None-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[None-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[None-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[None-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[tests/data/test.jams-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-11025-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-11025-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-22050-128-tests/data/test.ogg]",
"tests/test_core.py::test_pump_empty[jam2-22050-512-tests/data/test.ogg]",
"tests/test_core.py::test_pump_add[11025-128]",
"tests/test_core.py::test_pump_add[11025-512]",
"tests/test_core.py::test_pump_add[22050-128]",
"tests/test_core.py::test_pump_add[22050-512]"
]
| []
| ISC License | 1,142 | [
"pumpp/core.py",
"pumpp/version.py"
]
| [
"pumpp/core.py",
"pumpp/version.py"
]
|
|
duecredit__duecredit-106 | 108397f32edf8b35ad6b416fa50f33a110263a3b | 2017-04-03 18:28:30 | 2221bfdb3c89afd30daac78de1a78a893e932b58 | diff --git a/duecredit/dueswitch.py b/duecredit/dueswitch.py
index 07ea589..1f76fb1 100644
--- a/duecredit/dueswitch.py
+++ b/duecredit/dueswitch.py
@@ -69,16 +69,23 @@ class DueSwitch(object):
return self.__active
@never_fail
- def _dump_collector_summary(self):
+ def dump(self, **kwargs):
+ """Dumps summary of the citations
+
+ Parameters
+ ----------
+ **kwargs: dict
+ Passed to `CollectorSummary` constructor.
+ """
from duecredit.collector import CollectorSummary
- due_summary = CollectorSummary(self.__collectors[True])
+ due_summary = CollectorSummary(self.__collectors[True], **kwargs)
due_summary.dump()
def __prepare_exit_and_injections(self):
# Wrapper to create and dump summary... passing method doesn't work:
# probably removes instance too early
- atexit.register(self._dump_collector_summary)
+ atexit.register(self.dump)
# Deal with injector
from .injections import DueCreditInjector
@@ -93,7 +100,7 @@ class DueSwitch(object):
is_public = lambda x: not x.startswith('_')
# Clean up current bindings first
for k in filter(is_public, dir(self)):
- if k not in ('activate', 'active'):
+ if k not in ('activate', 'active', 'dump'):
delattr(self, k)
new_due = self.__collectors[activate]
| Make `DueSwitch._dump_collector_summary` public?
I am very new to this library, so there may be good reasons to avoid this. However, from my perspective, it seems natural to have `DueSwitch._dump_collector_summary` as a public function (perhaps exposed as `DueSwitch.due_summary` or something) to make using duecredit with standalone jupyter notebooks a little more self-contained. I am thinking along the lines of:
# near top of notebook
import os
os.environ['DUECREDIT_ENABLE'] = 'yes'
# body of notebook
import fancymodule as fm
fm.do_stuff()
# section at the end
fm.due._dump_collector_summary()
This way one doesn't have to deal with external files keeping track of citations, which other notebooks might also append to, something that could be undesirable.
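With the patch above applied, the notebook sketch from the issue would use the now-public method; this is only a restatement under that assumption, and `fancymodule` remains the issue's placeholder package name.

```python
# near top of notebook
import os
os.environ['DUECREDIT_ENABLE'] = 'yes'

# body of notebook
import fancymodule as fm   # placeholder package, as in the issue
fm.do_stuff()

# section at the end: public API, no external citation files involved
fm.due.dump()
```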
| duecredit/duecredit | diff --git a/duecredit/tests/test_dueswitch.py b/duecredit/tests/test_dueswitch.py
index 6a1b826..c1d664d 100644
--- a/duecredit/tests/test_dueswitch.py
+++ b/duecredit/tests/test_dueswitch.py
@@ -38,4 +38,4 @@ def test_dueswitch_activate(monkeypatch):
# was not active, so should have called activate of the injector class
assert state["activate"] == 1
assert state["register"] == 1
- assert state["register_func"] == due._dump_collector_summary
+ assert state["register_func"] == due.dump
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.6 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y libxml2-dev libxslt1-dev"
],
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
citeproc-py==0.8.2
contextlib2==21.6.0
-e git+https://github.com/duecredit/duecredit.git@108397f32edf8b35ad6b416fa50f33a110263a3b#egg=duecredit
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
lxml==5.3.1
multidict==6.2.0
packaging==24.2
pluggy==1.5.0
propcache==0.3.1
pytest==8.3.5
PyYAML==6.0.2
requests==2.32.3
six==1.17.0
tomli==2.2.1
typing_extensions==4.13.0
urllib3==1.26.20
vcrpy==7.0.0
wrapt==1.17.2
yarl==1.18.3
| name: duecredit
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- citeproc-py==0.8.2
- contextlib2==21.6.0
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- lxml==5.3.1
- multidict==6.2.0
- packaging==24.2
- pluggy==1.5.0
- propcache==0.3.1
- pytest==8.3.5
- pyyaml==6.0.2
- requests==2.32.3
- six==1.17.0
- tomli==2.2.1
- typing-extensions==4.13.0
- urllib3==1.26.20
- vcrpy==7.0.0
- wrapt==1.17.2
- yarl==1.18.3
prefix: /opt/conda/envs/duecredit
| [
"duecredit/tests/test_dueswitch.py::test_dueswitch_activate"
]
| []
| []
| []
| BSD License | 1,143 | [
"duecredit/dueswitch.py"
]
| [
"duecredit/dueswitch.py"
]
|
|
Azure__msrest-for-python-28 | 02b1e35c7cfb045bd4752abe800ad6912282eb6e | 2017-04-03 22:04:40 | 24deba7a7a9e335314058ec2d0b39a710f61be60 | diff --git a/msrest/service_client.py b/msrest/service_client.py
index a204089..ac70211 100644
--- a/msrest/service_client.py
+++ b/msrest/service_client.py
@@ -138,7 +138,7 @@ class ServiceClient(object):
return kwargs
- def send_formdata(self, request, headers={}, content={}, **config):
+ def send_formdata(self, request, headers=None, content=None, **config):
"""Send data as a multipart form-data request.
We only deal with file-like objects or strings at this point.
The requests is not yet streamed.
@@ -148,11 +148,11 @@ class ServiceClient(object):
:param dict content: Dictionary of the fields of the formdata.
:param config: Any specific config overrides.
"""
+ if content is None:
+ content = {}
file_data = {f: self._format_data(d) for f, d in content.items()}
- try:
- del headers['Content-Type']
- except KeyError:
- pass
+ if headers:
+ headers.pop('Content-Type', None)
return self.send(request, headers, None, files=file_data, **config)
def send(self, request, headers=None, content=None, **config):
@@ -290,7 +290,7 @@ class ServiceClient(object):
"""
self._headers[header] = value
- def get(self, url=None, params={}):
+ def get(self, url=None, params=None):
"""Create a GET request object.
:param str url: The request URL.
@@ -300,7 +300,7 @@ class ServiceClient(object):
request.method = 'GET'
return request
- def put(self, url=None, params={}):
+ def put(self, url=None, params=None):
"""Create a PUT request object.
:param str url: The request URL.
@@ -310,7 +310,7 @@ class ServiceClient(object):
request.method = 'PUT'
return request
- def post(self, url=None, params={}):
+ def post(self, url=None, params=None):
"""Create a POST request object.
:param str url: The request URL.
@@ -320,7 +320,7 @@ class ServiceClient(object):
request.method = 'POST'
return request
- def head(self, url=None, params={}):
+ def head(self, url=None, params=None):
"""Create a HEAD request object.
:param str url: The request URL.
@@ -330,7 +330,7 @@ class ServiceClient(object):
request.method = 'HEAD'
return request
- def patch(self, url=None, params={}):
+ def patch(self, url=None, params=None):
"""Create a PATCH request object.
:param str url: The request URL.
@@ -340,7 +340,7 @@ class ServiceClient(object):
request.method = 'PATCH'
return request
- def delete(self, url=None, params={}):
+ def delete(self, url=None, params=None):
"""Create a DELETE request object.
:param str url: The request URL.
@@ -350,7 +350,7 @@ class ServiceClient(object):
request.method = 'DELETE'
return request
- def merge(self, url=None, params={}):
+ def merge(self, url=None, params=None):
"""Create a MERGE request object.
:param str url: The request URL.
| Default argument value is mutable
Hi,
Just want to confirm: I noticed that some methods in `ServiceClient` contain a mutable default argument, for instance:
    def get(self, url=None, params={}):
        """Create a GET request object.

        :param str url: The request URL.
        :param dict params: Request URL parameters.
        """
        request = self._request(url, params)
        request.method = 'GET'
        return request
And the default argument `params` is changed in `self._request(url, params)`. Is that by design? I just think it's a little weird. Thanks!
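For readers unfamiliar with the pitfall being reported here, a small self-contained illustration (generic names, not msrest's API) of why a mutable default is shared across calls, together with the `None`-sentinel pattern the patch above switches to:

```python
def add_param(key, value, params={}):   # default dict is created once, then shared
    params[key] = value
    return params

print(add_param('a', 1))   # {'a': 1}
print(add_param('b', 2))   # {'a': 1, 'b': 2}  ('a' leaked in from the first call)


def add_param_safe(key, value, params=None):   # sentinel pattern used in the fix
    if params is None:
        params = {}
    params[key] = value
    return params

print(add_param_safe('a', 1))   # {'a': 1}
print(add_param_safe('b', 2))   # {'b': 2}
```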
| Azure/msrest-for-python | diff --git a/test/unittest_client.py b/test/unittest_client.py
index c22b13e..4a193ee 100644
--- a/test/unittest_client.py
+++ b/test/unittest_client.py
@@ -201,7 +201,7 @@ class TestServiceClient(unittest.TestCase):
mock_client._format_data.return_value = "formatted"
request = ClientRequest('GET')
ServiceClient.send_formdata(mock_client, request)
- mock_client.send.assert_called_with(request, {}, None, files={})
+ mock_client.send.assert_called_with(request, None, None, files={})
ServiceClient.send_formdata(mock_client, request, {'id':'1234'}, {'Test':'Data'})
mock_client.send.assert_called_with(request, {'id':'1234'}, None, files={'Test':'formatted'})
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"coverage",
"flake8"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
coverage==6.2
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.6.1
mccabe==0.7.0
-e git+https://github.com/Azure/msrest-for-python.git@02b1e35c7cfb045bd4752abe800ad6912282eb6e#egg=msrest
oauthlib==3.2.2
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: msrest-for-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- coverage==6.2
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.6.1
- mccabe==0.7.0
- oauthlib==3.2.2
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/msrest-for-python
| [
"test/unittest_client.py::TestServiceClient::test_client_formdata_send"
]
| []
| [
"test/unittest_client.py::TestServiceClient::test_client_add_hook",
"test/unittest_client.py::TestServiceClient::test_client_header",
"test/unittest_client.py::TestServiceClient::test_client_request",
"test/unittest_client.py::TestServiceClient::test_client_send",
"test/unittest_client.py::TestServiceClient::test_format_data",
"test/unittest_client.py::TestServiceClient::test_format_url"
]
| []
| MIT License | 1,144 | [
"msrest/service_client.py"
]
| [
"msrest/service_client.py"
]
|
|
pimutils__todoman-231 | bbbefc26fb7b404e828e0a43df77150028cf359e | 2017-04-03 23:33:44 | 836284bf7c4b30a3fe761d518933af24825bbc90 | diff --git a/todoman/model.py b/todoman/model.py
index 12f7a33..cff1434 100644
--- a/todoman/model.py
+++ b/todoman/model.py
@@ -99,6 +99,7 @@ class Todo:
self.dtstamp = now
self.due = None
self.id = None
+ self.last_modified = None
self.location = ''
self.percent_complete = 0
self.priority = 0
@@ -158,6 +159,7 @@ class Todo:
'dtstamp',
'start',
'due',
+ 'last_modified',
]
ALL_SUPPORTED_FIELDS = (
DATETIME_FIELDS +
@@ -234,6 +236,7 @@ class VtodoWritter:
'priority': 'priority',
'status': 'status',
'created_at': 'created',
+ 'last_modified': 'last-modified',
}
def __init__(self, todo):
@@ -249,9 +252,6 @@ class VtodoWritter:
'''
if isinstance(dt, date) and not isinstance(dt, datetime):
dt = datetime(dt.year, dt.month, dt.day)
- # XXX: Can we actually get times from the UI?
- elif isinstance(dt, time):
- dt = datetime.combine(date.today(), dt)
if not dt.tzinfo:
dt = dt.replace(tzinfo=LOCAL_TIMEZONE)
@@ -347,7 +347,7 @@ class Cache:
may be used for filtering/sorting.
"""
- SCHEMA_VERSION = 3
+ SCHEMA_VERSION = 4
def __init__(self, path):
self.cache_path = str(path)
@@ -429,6 +429,8 @@ class Cache:
"description" TEXT,
"location" TEXT,
"categories" TEXT,
+ "sequence" INTEGER,
+ "last_modified" INTEGER,
FOREIGN KEY(file_path) REFERENCES files(path) ON DELETE CASCADE
);
@@ -518,8 +520,10 @@ class Cache:
status,
description,
location,
- categories
- ) VALUES ({}?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ categories,
+ sequence,
+ last_modified
+ ) VALUES ({}?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
'''
due = self._serialize_datetime(todo, 'due')
@@ -543,6 +547,8 @@ class Cache:
todo.get('description', None),
todo.get('location', None),
todo.get('categories', None),
+ todo.get('sequence', 1),
+ self._serialize_datetime(todo, 'last-modified'),
)
if id:
@@ -704,6 +710,8 @@ class Cache:
todo.status = row['status']
todo.description = row['description']
todo.location = row['location']
+ todo.sequence = row['sequence']
+ todo.last_modified = row['last_modified']
todo.list = self.lists_map[row['list_name']]
todo.filename = os.path.basename(row['path'])
return todo
@@ -906,6 +914,8 @@ class Database:
def save(self, todo):
todo.sequence += 1
+ todo.last_modified = datetime.now(LOCAL_TIMEZONE)
+
vtodo = VtodoWritter(todo).write()
self.cache.expire_file(todo.path)
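To make the file-level behavior of the `Database.save` change just shown concrete, here is a rough sketch using the `icalendar` package (already a test dependency of the project). The UID and summary are made up; todoman itself performs the equivalent bookkeeping on its own `Todo` objects before serializing.

```python
from datetime import datetime, timezone

import icalendar

vtodo = icalendar.Todo()
vtodo.add('uid', 'example-todo')           # made-up UID
vtodo.add('summary', 'water the plants')   # made-up summary
vtodo.add('sequence', 1)
vtodo.add('last-modified', datetime.now(timezone.utc))

# On each save: bump the revision counter and refresh the timestamp.
sequence = int(vtodo.pop('sequence'))
vtodo.add('sequence', sequence + 1)
vtodo.pop('last-modified')
vtodo.add('last-modified', datetime.now(timezone.utc))

print(vtodo.to_ical().decode())
```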
| Increment sequence number upon edition
This issue was moved from the issue tracker at GitLab.
---
Increment the [sequence number](https://tools.ietf.org/html/rfc5545#section-3.8.7.4) every time the file is modified/saved.
* [x] `SEQUENCE`
* [x] `LAST-MODIFIED` | pimutils/todoman | diff --git a/tests/test_backend.py b/tests/test_backend.py
index 56a3dd9..7a4a771 100644
--- a/tests/test_backend.py
+++ b/tests/test_backend.py
@@ -3,6 +3,7 @@ from datetime import datetime
import pytest
import pytz
from dateutil.tz import tzlocal
+from freezegun import freeze_time
from todoman.model import Todo, VtodoWritter
@@ -57,3 +58,21 @@ def test_vtodo_serialization(todo_factory):
assert vtodo.get('priority') == 7
assert vtodo.decoded('due') == datetime(3000, 3, 21, tzinfo=tzlocal())
assert str(vtodo.get('status')) == 'IN-PROCESS'
+
+
+@freeze_time('2017-04-04 20:11:57')
+def test_update_last_modified(todo_factory, todos, tmpdir):
+ todo = todo_factory()
+ assert todo.last_modified == datetime.now(tzlocal())
+
+
+def test_sequence_increment(default_database, todo_factory, todos):
+ todo = todo_factory()
+ assert todo.sequence == 1
+
+ default_database.save(todo)
+ assert todo.sequence == 2
+
+    # Reload (and check the caching flow for the sequence)
+ todo = next(todos())
+ assert todo.sequence == 2
diff --git a/tests/test_model.py b/tests/test_model.py
index 7ac59b2..fd41f52 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -1,6 +1,5 @@
from datetime import datetime
-import icalendar
import pytest
import pytz
from dateutil.tz import tzlocal
@@ -72,29 +71,6 @@ def test_change_paths(tmpdir, create):
assert not list(db.todos())
-def test_sequence_increment(tmpdir, default_database):
- todo = Todo(new=True, list=next(default_database.lists()))
- default_database.save(todo)
-
- with open(todo.path) as f:
- cal = icalendar.Calendar.from_ical(f.read())
- sequence, = [component.get("SEQUENCE", 0)
- for component in cal.subcomponents
- if component.name == "VTODO"]
-
- assert sequence == 1
-
- default_database.save(todo)
-
- with open(todo.path) as f:
- cal = icalendar.Calendar.from_ical(f.read())
- sequence, = [component.get("SEQUENCE", 0)
- for component in cal.subcomponents
- if component.name == "VTODO"]
-
- assert sequence == 2
-
-
def test_list_displayname(tmpdir):
tmpdir.join('default').mkdir()
with tmpdir.join('default').join('displayname').open('w') as f:
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 1
} | 3.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.7",
"reqs_path": [
"requirements.txt",
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | atomicwrites==1.4.1
attrs==24.2.0
backports.zoneinfo==0.2.1
certifi @ file:///croot/certifi_1671487769961/work/certifi
click==8.1.8
click-log==0.4.0
configobj==5.0.9
coverage==7.2.7
exceptiongroup==1.2.2
flake8==5.0.4
flake8-import-order==0.18.2
freezegun==1.5.1
humanize==4.6.0
hypothesis==6.79.4
icalendar==5.0.13
importlib-metadata==4.2.0
iniconfig==2.0.0
mccabe==0.7.0
packaging==24.0
parsedatetime==2.6
pluggy==1.2.0
pycodestyle==2.9.1
pyflakes==2.5.0
pytest==7.4.4
pytest-cov==4.1.0
python-dateutil==2.9.0.post0
pytz==2025.2
pyxdg==0.28
six==1.17.0
sortedcontainers==2.4.0
tabulate==0.9.0
-e git+https://github.com/pimutils/todoman.git@bbbefc26fb7b404e828e0a43df77150028cf359e#egg=todoman
tomli==2.0.1
typing_extensions==4.7.1
urwid==2.6.16
wcwidth==0.2.13
zipp==3.15.0
| name: todoman
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2022.12.7=py37h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=22.3.1=py37h06a4308_0
- python=3.7.16=h7a1cb2a_0
- readline=8.2=h5eee18b_0
- setuptools=65.6.3=py37h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.38.4=py37h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- atomicwrites==1.4.1
- attrs==24.2.0
- backports-zoneinfo==0.2.1
- click==8.1.8
- click-log==0.4.0
- configobj==5.0.9
- coverage==7.2.7
- exceptiongroup==1.2.2
- flake8==5.0.4
- flake8-import-order==0.18.2
- freezegun==1.5.1
- humanize==4.6.0
- hypothesis==6.79.4
- icalendar==5.0.13
- importlib-metadata==4.2.0
- iniconfig==2.0.0
- mccabe==0.7.0
- packaging==24.0
- parsedatetime==2.6
- pluggy==1.2.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pytest==7.4.4
- pytest-cov==4.1.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyxdg==0.28
- six==1.17.0
- sortedcontainers==2.4.0
- tabulate==0.9.0
- tomli==2.0.1
- typing-extensions==4.7.1
- urwid==2.6.16
- wcwidth==0.2.13
- zipp==3.15.0
prefix: /opt/conda/envs/todoman
| [
"tests/test_backend.py::test_update_last_modified",
"tests/test_backend.py::test_sequence_increment"
]
| [
"tests/test_backend.py::test_vtodo_serialization"
]
| [
"tests/test_backend.py::test_serialize_created_at",
"tests/test_backend.py::test_serialize_dtstart",
"tests/test_backend.py::test_serializer_raises",
"tests/test_backend.py::test_supported_fields_are_serializeable",
"tests/test_model.py::test_querying",
"tests/test_model.py::test_retain_tz",
"tests/test_model.py::test_change_paths",
"tests/test_model.py::test_list_displayname",
"tests/test_model.py::test_list_colour",
"tests/test_model.py::test_list_no_colour",
"tests/test_model.py::test_database_priority_sorting",
"tests/test_model.py::test_retain_unknown_fields",
"tests/test_model.py::test_todo_setters",
"tests/test_model.py::test_is_completed",
"tests/test_model.py::test_todo_filename_absolute_path",
"tests/test_model.py::test_list_equality",
"tests/test_model.py::test_clone",
"tests/test_model.py::test_todos_startable",
"tests/test_model.py::test_filename_uid_colision",
"tests/test_model.py::test_hide_cancelled",
"tests/test_model.py::test_illegal_start_suppression",
"tests/test_model.py::test_default_status"
]
| []
| ISC License | 1,145 | [
"todoman/model.py"
]
| [
"todoman/model.py"
]
|
|
bmcfee__pumpp-59 | 6be75b4724c19cec1f5edb3c2c438433ca4604ac | 2017-04-04 12:37:19 | 797a732eb907bd816b75b10da4707517789e871d | diff --git a/pumpp/feature/__init__.py b/pumpp/feature/__init__.py
index b3a9b34..197a14a 100644
--- a/pumpp/feature/__init__.py
+++ b/pumpp/feature/__init__.py
@@ -11,6 +11,9 @@ Feature extractors
CQT
CQTMag
CQTPhaseDiff
+ HCQT
+ HCQTMag
+ HCQTPhaseDiff
STFT
STFTMag
STFTPhaseDiff
diff --git a/pumpp/feature/base.py b/pumpp/feature/base.py
index 98e3d58..299f05d 100644
--- a/pumpp/feature/base.py
+++ b/pumpp/feature/base.py
@@ -44,15 +44,15 @@ class FeatureExtractor(Scope):
self.hop_length = hop_length
self.conv = conv
- def register(self, key, dimension, dtype):
+ def register(self, key, dimension, dtype, channels=1):
shape = [None, dimension]
if self.conv in ('channels_last', 'tf'):
- shape.append(1)
+ shape.append(channels)
elif self.conv in ('channels_first', 'th'):
- shape.insert(0, 1)
+ shape.insert(0, channels)
super(FeatureExtractor, self).register(key, shape, dtype)
diff --git a/pumpp/feature/cqt.py b/pumpp/feature/cqt.py
index a5b4c84..0155b91 100644
--- a/pumpp/feature/cqt.py
+++ b/pumpp/feature/cqt.py
@@ -5,8 +5,10 @@ import numpy as np
from librosa import cqt, magphase, note_to_hz, amplitude_to_db
from .base import FeatureExtractor
+from ..exceptions import ParameterError
-__all__ = ['CQT', 'CQTMag', 'CQTPhaseDiff']
+__all__ = ['CQT', 'CQTMag', 'CQTPhaseDiff',
+ 'HCQT', 'HCQTMag', 'HCQTPhaseDiff']
class CQT(FeatureExtractor):
@@ -151,3 +153,191 @@ class CQTPhaseDiff(CQT):
data = super(CQTPhaseDiff, self).transform_audio(y)
data['dphase'] = self.phase_diff(data.pop('phase'))
return data
+
+
+class HCQT(FeatureExtractor):
+ '''Harmonic Constant-Q transform
+
+ Attributes
+ ----------
+ name : str
+ The name for this feature extractor
+
+ sr : number > 0
+ The sampling rate of audio
+
+ hop_length : int > 0
+ The number of samples between CQT frames
+
+ n_octaves : int > 0
+ The number of octaves in the CQT
+
+ over_sample : int > 0
+ The amount of frequency oversampling (bins per semitone)
+
+ fmin : float > 0
+ The minimum frequency of the CQT
+
+ harmonics : list of int >= 1
+ The list of harmonics to compute
+
+ log : boolean
+ If `True`, scale the magnitude to decibels
+
+ Otherwise, use linear magnitude
+
+ conv : {'tf', 'th', 'channels_last', 'channels_first', None}
+ convolution dimension ordering:
+
+ - 'channels_last' for tensorflow-style 2D convolution
+ - 'tf' equivalent to 'channels_last'
+ - 'channels_first' for theano-style 2D convolution
+ - 'th' equivalent to 'channels_first'
+
+ '''
+ def __init__(self, name, sr, hop_length, n_octaves=8, over_sample=3,
+ fmin=None, harmonics=None, log=False, conv='channels_last'):
+
+ if conv not in ('channels_last', 'tf', 'channels_first', 'th'):
+ raise ParameterError('Invalid conv={}'.format(conv))
+
+ super(HCQT, self).__init__(name, sr, hop_length, conv=conv)
+
+ if fmin is None:
+ fmin = note_to_hz('C1')
+
+ if harmonics is None:
+ harmonics = [1]
+ else:
+ harmonics = list(harmonics)
+ if not all(isinstance(_, int) and _ > 0 for _ in harmonics):
+ raise ParameterError('Invalid harmonics={}'.format(harmonics))
+
+ self.n_octaves = n_octaves
+ self.over_sample = over_sample
+ self.fmin = fmin
+ self.log = log
+ self.harmonics = harmonics
+
+ n_bins = n_octaves * 12 * over_sample
+ self.register('mag', n_bins, np.float32, channels=len(harmonics))
+ self.register('phase', n_bins, np.float32, channels=len(harmonics))
+
+ def transform_audio(self, y):
+ '''Compute the HCQT
+
+ Parameters
+ ----------
+ y : np.ndarray
+ The audio buffer
+
+ Returns
+ -------
+ data : dict
+ data['mag'] : np.ndarray, shape = (n_frames, n_bins, n_harmonics)
+ The CQT magnitude
+
+ data['phase']: np.ndarray, shape = mag.shape
+ The CQT phase
+ '''
+ cqtm, phase = [], []
+
+ for h in self.harmonics:
+ C, P = magphase(cqt(y=y,
+ sr=self.sr,
+ hop_length=self.hop_length,
+ fmin=self.fmin * h,
+ n_bins=(self.n_octaves *
+ self.over_sample * 12),
+ bins_per_octave=(self.over_sample * 12)))
+ if self.log:
+ C = amplitude_to_db(C, ref=np.max)
+ cqtm.append(C)
+ phase.append(P)
+
+ cqtm = np.asarray(cqtm).astype(np.float32)
+ phase = np.angle(np.asarray(phase)).astype(np.float32)
+
+ return {'mag': self._index(cqtm),
+ 'phase': self._index(phase)}
+
+ def _index(self, value):
+ '''Rearrange a tensor according to the convolution mode
+
+ Input is assumed to be in (channels, bins, time) format.
+ '''
+
+ if self.conv in ('channels_last', 'tf'):
+ return np.transpose(value, (2, 1, 0))
+
+ else: # self.conv in ('channels_first', 'th')
+ return np.transpose(value, (0, 2, 1))
+
+
+class HCQTMag(HCQT):
+ '''Magnitude HCQT
+
+ See Also
+ --------
+ HCQT
+ '''
+
+ def __init__(self, *args, **kwargs):
+ super(HCQTMag, self).__init__(*args, **kwargs)
+ self.pop('phase')
+
+ def transform_audio(self, y):
+ '''Compute HCQT magnitude.
+
+ Parameters
+ ----------
+ y : np.ndarray
+ the audio buffer
+
+ Returns
+ -------
+ data : dict
+ data['mag'] : np.ndarray, shape=(n_frames, n_bins)
+ The CQT magnitude
+ '''
+ data = super(HCQTMag, self).transform_audio(y)
+ data.pop('phase')
+ return data
+
+
+class HCQTPhaseDiff(HCQT):
+ '''HCQT with unwrapped phase differentials
+
+ See Also
+ --------
+ HCQT
+ '''
+ def __init__(self, *args, **kwargs):
+ super(HCQTPhaseDiff, self).__init__(*args, **kwargs)
+ phase_field = self.pop('phase')
+
+ self.register('dphase',
+ self.n_octaves * 12 * self.over_sample,
+ phase_field.dtype,
+ channels=len(self.harmonics))
+
+ def transform_audio(self, y):
+ '''Compute the HCQT with unwrapped phase
+
+ Parameters
+ ----------
+ y : np.ndarray
+ The audio buffer
+
+ Returns
+ -------
+ data : dict
+ data['mag'] : np.ndarray, shape=(n_frames, n_bins)
+ CQT magnitude
+
+ data['dphase'] : np.ndarray, shape=(n_frames, n_bins)
+ Unwrapped phase differential
+ '''
+ data = super(HCQTPhaseDiff, self).transform_audio(y)
+ data['dphase'] = self.phase_diff(data.pop('phase'))
+ return data
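A brief usage sketch of the new extractor defined above; the audio file name is a placeholder, and the constructor arguments mirror those exercised in the tests rather than recommended settings.

```python
import librosa
import pumpp

# Three-harmonic HCQT with log magnitude and channels-last layout.
ext = pumpp.feature.HCQT(name='hcqt', sr=22050, hop_length=512,
                         n_octaves=6, over_sample=1,
                         harmonics=[1, 2, 3], log=True,
                         conv='channels_last')

y, sr = librosa.load('some_track.ogg', sr=22050)   # placeholder file name
data = ext.transform(y=y, sr=sr)

# With conv='channels_last', the harmonics occupy the trailing channel axis.
print(data['hcqt/mag'].shape, data['hcqt/phase'].shape)
```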
| harmonic cqt | bmcfee/pumpp | diff --git a/tests/test_feature.py b/tests/test_feature.py
index d3b5f6b..304358e 100644
--- a/tests/test_feature.py
+++ b/tests/test_feature.py
@@ -66,22 +66,37 @@ def conv(request):
return request.param
-
@pytest.fixture(params=[False, True])
def log(request):
return request.param
+@pytest.fixture(params=['tf', 'th', 'channels_last', 'channels_first',
+ pytest.mark.xfail(None,
+ raises=pumpp.ParameterError),
+ pytest.mark.xfail('bad mode',
+ raises=pumpp.ParameterError)])
+def hconv(request):
+ return request.param
+
+
+@pytest.fixture(params=[None, [1], [1, 2], [1, 2, 3],
+ pytest.mark.xfail([-1], raises=pumpp.ParameterError),
+ pytest.mark.xfail('bad harmonics',
+ raises=pumpp.ParameterError)])
+def harmonics(request):
+ return request.param
+
# STFT features
-def __check_shape(fields, key, dim, conv):
+def __check_shape(fields, key, dim, conv, channels=1):
if conv is None:
assert fields[key].shape == (None, dim)
elif conv in ('channels_last', 'tf'):
- assert fields[key].shape == (None, dim, 1)
+ assert fields[key].shape == (None, dim, channels)
elif conv in ('channels_first', 'th'):
- assert fields[key].shape == (1, None, dim)
+ assert fields[key].shape == (channels, None, dim)
def test_feature_stft_fields(SR, HOP_LENGTH, n_fft, conv, log):
@@ -246,7 +261,8 @@ def test_feature_cqtmag_fields(SR, HOP_LENGTH, over_sample, n_octaves, conv):
assert ext.fields['cqt/mag'].dtype is np.float32
-def test_feature_cqtphasediff_fields(SR, HOP_LENGTH, over_sample, n_octaves, conv):
+def test_feature_cqtphasediff_fields(SR, HOP_LENGTH, over_sample, n_octaves,
+ conv):
ext = pumpp.feature.CQTPhaseDiff(name='cqt',
sr=SR, hop_length=HOP_LENGTH,
@@ -269,6 +285,7 @@ def test_feature_cqt(audio, SR, HOP_LENGTH, over_sample, n_octaves, conv, log):
sr=SR, hop_length=HOP_LENGTH,
n_octaves=n_octaves,
over_sample=over_sample,
+ log=log,
conv=conv)
output = ext.transform(**audio)
@@ -280,12 +297,14 @@ def test_feature_cqt(audio, SR, HOP_LENGTH, over_sample, n_octaves, conv, log):
assert type_match(output[key].dtype, ext.fields[key].dtype)
-def test_feature_cqtmag(audio, SR, HOP_LENGTH, over_sample, n_octaves, conv, log):
+def test_feature_cqtmag(audio, SR, HOP_LENGTH, over_sample, n_octaves, conv,
+ log):
ext = pumpp.feature.CQTMag(name='cqt',
sr=SR, hop_length=HOP_LENGTH,
n_octaves=n_octaves,
over_sample=over_sample,
+ log=log,
conv=conv)
output = ext.transform(**audio)
@@ -297,12 +316,14 @@ def test_feature_cqtmag(audio, SR, HOP_LENGTH, over_sample, n_octaves, conv, log
assert type_match(output[key].dtype, ext.fields[key].dtype)
-def test_feature_cqtphasediff(audio, SR, HOP_LENGTH, over_sample, n_octaves, conv, log):
+def test_feature_cqtphasediff(audio, SR, HOP_LENGTH, over_sample, n_octaves,
+ conv, log):
ext = pumpp.feature.CQTPhaseDiff(name='cqt',
sr=SR, hop_length=HOP_LENGTH,
n_octaves=n_octaves,
over_sample=over_sample,
+ log=log,
conv=conv)
output = ext.transform(**audio)
@@ -336,7 +357,6 @@ def test_feature_tempogram(audio, SR, HOP_LENGTH, WIN_LENGTH, conv):
win_length=WIN_LENGTH,
conv=conv)
-
output = ext.transform(**audio)
assert set(output.keys()) == set(ext.fields.keys())
@@ -376,3 +396,138 @@ def test_feature_temposcale(audio, SR, HOP_LENGTH, WIN_LENGTH, N_FMT, conv):
for key in ext.fields:
assert shape_match(output[key].shape[1:], ext.fields[key].shape)
assert type_match(output[key].dtype, ext.fields[key].dtype)
+
+
+# HCQT features
+
+def test_feature_hcqt_fields(SR, HOP_LENGTH, over_sample, n_octaves,
+ hconv, harmonics):
+
+ ext = pumpp.feature.HCQT(name='hcqt',
+ sr=SR, hop_length=HOP_LENGTH,
+ n_octaves=n_octaves,
+ over_sample=over_sample,
+ conv=hconv,
+ harmonics=harmonics)
+
+ # Check the fields
+ assert set(ext.fields.keys()) == set(['hcqt/mag', 'hcqt/phase'])
+
+ if not harmonics:
+ channels = 1
+ else:
+ channels = len(harmonics)
+
+ __check_shape(ext.fields, 'hcqt/mag', over_sample * n_octaves * 12,
+ hconv, channels=channels)
+ __check_shape(ext.fields, 'hcqt/phase', over_sample * n_octaves * 12,
+ hconv, channels=channels)
+ assert ext.fields['hcqt/mag'].dtype is np.float32
+ assert ext.fields['hcqt/phase'].dtype is np.float32
+
+
+def test_feature_hcqtmag_fields(SR, HOP_LENGTH, over_sample, n_octaves,
+ hconv, harmonics):
+
+ ext = pumpp.feature.HCQTMag(name='hcqt',
+ sr=SR, hop_length=HOP_LENGTH,
+ n_octaves=n_octaves,
+ over_sample=over_sample,
+ conv=hconv, harmonics=harmonics)
+
+ if not harmonics:
+ channels = 1
+ else:
+ channels = len(harmonics)
+
+ # Check the fields
+ assert set(ext.fields.keys()) == set(['hcqt/mag'])
+
+ __check_shape(ext.fields, 'hcqt/mag', over_sample * n_octaves * 12,
+ hconv, channels=channels)
+ assert ext.fields['hcqt/mag'].dtype is np.float32
+
+
+def test_feature_hcqtphasediff_fields(SR, HOP_LENGTH, over_sample, n_octaves,
+ hconv, harmonics):
+
+ ext = pumpp.feature.HCQTPhaseDiff(name='hcqt',
+ sr=SR, hop_length=HOP_LENGTH,
+ n_octaves=n_octaves,
+ over_sample=over_sample,
+ conv=hconv, harmonics=harmonics)
+
+ if not harmonics:
+ channels = 1
+ else:
+ channels = len(harmonics)
+
+ # Check the fields
+ assert set(ext.fields.keys()) == set(['hcqt/mag', 'hcqt/dphase'])
+
+ __check_shape(ext.fields, 'hcqt/mag', over_sample * n_octaves * 12,
+ hconv, channels=channels)
+ __check_shape(ext.fields, 'hcqt/dphase', over_sample * n_octaves * 12,
+ hconv, channels=channels)
+ assert ext.fields['hcqt/mag'].dtype is np.float32
+ assert ext.fields['hcqt/dphase'].dtype is np.float32
+
+
+def test_feature_hcqt(audio, SR, HOP_LENGTH, over_sample, n_octaves,
+ hconv, log, harmonics):
+
+ ext = pumpp.feature.HCQT(name='hcqt',
+ sr=SR, hop_length=HOP_LENGTH,
+ n_octaves=n_octaves,
+ over_sample=over_sample,
+ conv=hconv,
+ log=log,
+ harmonics=harmonics)
+
+ output = ext.transform(**audio)
+
+ assert set(output.keys()) == set(ext.fields.keys())
+
+ for key in ext.fields:
+ assert shape_match(output[key].shape[1:], ext.fields[key].shape)
+ assert type_match(output[key].dtype, ext.fields[key].dtype)
+
+
+def test_feature_hcqtmag(audio, SR, HOP_LENGTH, over_sample, n_octaves,
+ hconv, log, harmonics):
+
+ ext = pumpp.feature.HCQTMag(name='hcqt',
+ sr=SR, hop_length=HOP_LENGTH,
+ n_octaves=n_octaves,
+ over_sample=over_sample,
+ conv=hconv,
+ log=log,
+ harmonics=harmonics)
+
+ output = ext.transform(**audio)
+
+ assert set(output.keys()) == set(ext.fields.keys())
+
+ for key in ext.fields:
+ assert shape_match(output[key].shape[1:], ext.fields[key].shape)
+ assert type_match(output[key].dtype, ext.fields[key].dtype)
+
+
+def test_feature_hcqtphasediff(audio, SR, HOP_LENGTH, over_sample, n_octaves,
+ hconv, log, harmonics):
+
+ ext = pumpp.feature.HCQTPhaseDiff(name='hcqt',
+ sr=SR, hop_length=HOP_LENGTH,
+ n_octaves=n_octaves,
+ over_sample=over_sample,
+ conv=hconv,
+ log=log,
+ harmonics=harmonics)
+
+ output = ext.transform(**audio)
+
+ assert set(output.keys()) == set(ext.fields.keys())
+
+ for key in ext.fields:
+ assert shape_match(output[key].shape[1:], ext.fields[key].shape)
+ assert type_match(output[key].dtype, ext.fields[key].dtype)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 3,
"test_score": 3
},
"num_modified_files": 3
} | 0.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[docs,tests]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y ffmpeg"
],
"python": "3.5",
"reqs_path": [
"docs/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | absl-py==0.15.0
alabaster==0.7.13
appdirs==1.4.4
astunparse==1.6.3
attrs==22.2.0
audioread==3.0.1
Babel==2.11.0
cached-property==1.5.2
cachetools==4.2.4
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
clang==5.0
coverage==6.2
dataclasses==0.8
decorator==5.1.1
docutils==0.18.1
flatbuffers==1.12
gast==0.4.0
google-auth==1.35.0
google-auth-oauthlib==0.4.6
google-pasta==0.2.0
grpcio==1.48.2
h5py==3.1.0
idna==3.10
imagesize==1.4.1
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
jams==0.3.4
Jinja2==3.0.3
joblib==1.1.1
jsonschema==3.2.0
keras==2.6.0
Keras-Preprocessing==1.1.2
librosa==0.9.2
llvmlite==0.36.0
Markdown==3.3.7
MarkupSafe==2.0.1
mir_eval==0.8.2
numba==0.53.1
numpy==1.19.5
numpydoc==1.1.0
oauthlib==3.2.2
opt-einsum==3.3.0
packaging==21.3
pandas==1.1.5
pluggy==1.0.0
pooch==1.6.0
protobuf==3.19.6
-e git+https://github.com/bmcfee/pumpp.git@6be75b4724c19cec1f5edb3c2c438433ca4604ac#egg=pumpp
py==1.11.0
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.21
Pygments==2.14.0
pyparsing==3.1.4
pyrsistent==0.18.0
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
requests==2.27.1
requests-oauthlib==2.0.0
resampy==0.4.3
rsa==4.9
scikit-learn==0.24.2
scipy==1.5.4
six==1.15.0
snowballstemmer==2.2.0
sortedcontainers==2.4.0
soundfile==0.13.1
Sphinx==5.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
tensorboard==2.6.0
tensorboard-data-server==0.6.1
tensorboard-plugin-wit==1.8.1
tensorflow==2.6.2
tensorflow-estimator==2.6.0
termcolor==1.1.0
threadpoolctl==3.1.0
tomli==1.2.3
typing-extensions==3.7.4.3
urllib3==1.26.20
Werkzeug==2.0.3
wrapt==1.12.1
zipp==3.6.0
| name: pumpp
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- absl-py==0.15.0
- alabaster==0.7.13
- appdirs==1.4.4
- astunparse==1.6.3
- attrs==22.2.0
- audioread==3.0.1
- babel==2.11.0
- cached-property==1.5.2
- cachetools==4.2.4
- cffi==1.15.1
- charset-normalizer==2.0.12
- clang==5.0
- coverage==6.2
- dataclasses==0.8
- decorator==5.1.1
- docutils==0.18.1
- flatbuffers==1.12
- gast==0.4.0
- google-auth==1.35.0
- google-auth-oauthlib==0.4.6
- google-pasta==0.2.0
- grpcio==1.48.2
- h5py==3.1.0
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jams==0.3.4
- jinja2==3.0.3
- joblib==1.1.1
- jsonschema==3.2.0
- keras==2.6.0
- keras-preprocessing==1.1.2
- librosa==0.9.2
- llvmlite==0.36.0
- markdown==3.3.7
- markupsafe==2.0.1
- mir-eval==0.8.2
- numba==0.53.1
- numpy==1.19.5
- numpydoc==1.1.0
- oauthlib==3.2.2
- opt-einsum==3.3.0
- packaging==21.3
- pandas==1.1.5
- pluggy==1.0.0
- pooch==1.6.0
- protobuf==3.19.6
- py==1.11.0
- pyasn1==0.5.1
- pyasn1-modules==0.3.0
- pycparser==2.21
- pygments==2.14.0
- pyparsing==3.1.4
- pyrsistent==0.18.0
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- requests==2.27.1
- requests-oauthlib==2.0.0
- resampy==0.4.3
- rsa==4.9
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.15.0
- snowballstemmer==2.2.0
- sortedcontainers==2.4.0
- soundfile==0.13.1
- sphinx==5.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- tensorboard==2.6.0
- tensorboard-data-server==0.6.1
- tensorboard-plugin-wit==1.8.1
- tensorflow==2.6.2
- tensorflow-estimator==2.6.0
- termcolor==1.1.0
- threadpoolctl==3.1.0
- tomli==1.2.3
- typing-extensions==3.7.4.3
- urllib3==1.26.20
- werkzeug==2.0.3
- wrapt==1.12.1
- zipp==3.6.0
prefix: /opt/conda/envs/pumpp
| [
"tests/test_feature.py::test_feature_hcqt_fields[1-1-tf-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-th-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_last-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_first-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-tf-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-th-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_last-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_first-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-tf-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-th-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_last-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_first-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-tf-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-th-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_last-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_first-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-tf-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-th-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-tf-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-th-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-tf-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-th-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-tf-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-th-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-tf-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-th-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-tf-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-th-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-tf-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-th-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_first-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-tf-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-tf-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-tf-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-tf-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-th-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-th-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-th-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-th-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_last-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_last-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_last-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_last-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_first-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_first-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_first-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_first-harmonics3]"
]
| [
"tests/test_feature.py::test_feature_stft_fields[512-conv5-False]",
"tests/test_feature.py::test_feature_stft_fields[512-conv5-True]",
"tests/test_feature.py::test_feature_stft_fields[1024-conv5-False]",
"tests/test_feature.py::test_feature_stft_fields[1024-conv5-True]",
"tests/test_feature.py::test_feature_stft_mag_fields[512-conv5]",
"tests/test_feature.py::test_feature_stft_mag_fields[1024-conv5]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[512-conv5]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[1024-conv5]",
"tests/test_feature.py::test_feature_mel_fields[512-32-conv5]",
"tests/test_feature.py::test_feature_mel_fields[512-128-conv5]",
"tests/test_feature.py::test_feature_mel_fields[1024-32-conv5]",
"tests/test_feature.py::test_feature_mel_fields[1024-128-conv5]",
"tests/test_feature.py::test_feature_cqt_fields[1-1-conv5]",
"tests/test_feature.py::test_feature_cqt_fields[1-4-conv5]",
"tests/test_feature.py::test_feature_cqt_fields[3-1-conv5]",
"tests/test_feature.py::test_feature_cqt_fields[3-4-conv5]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-1-conv5]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-4-conv5]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-1-conv5]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-4-conv5]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-1-conv5]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-4-conv5]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-1-conv5]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-4-conv5]",
"tests/test_feature.py::test_feature_tempogram_fields[192-conv5]",
"tests/test_feature.py::test_feature_tempogram_fields[384-conv5]",
"tests/test_feature.py::test_feature_temposcale_fields[192-16-conv5]",
"tests/test_feature.py::test_feature_temposcale_fields[192-128-conv5]",
"tests/test_feature.py::test_feature_temposcale_fields[384-16-conv5]",
"tests/test_feature.py::test_feature_temposcale_fields[384-128-conv5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv4-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv5-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-1-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv4-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv5-None]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[1-4-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv4-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv5-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-1-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv4-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv5-None]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqt_fields[3-4-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-1-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[1-4-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-1-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtmag_fields[3-4-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-1-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[1-4-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-1-hconv5-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-tf-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-tf-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-th-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-th-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_last-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_last-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_first-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-channels_first-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv4-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv4-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv4-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv4-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv4-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv4-harmonics5]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv5-None]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv5-harmonics1]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv5-harmonics2]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv5-harmonics3]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv5-harmonics4]",
"tests/test_feature.py::test_feature_hcqtphasediff_fields[3-4-hconv5-harmonics5]"
]
| [
"tests/test_feature.py::test_feature_stft_fields[512-None-False]",
"tests/test_feature.py::test_feature_stft_fields[512-None-True]",
"tests/test_feature.py::test_feature_stft_fields[512-tf-False]",
"tests/test_feature.py::test_feature_stft_fields[512-tf-True]",
"tests/test_feature.py::test_feature_stft_fields[512-th-False]",
"tests/test_feature.py::test_feature_stft_fields[512-th-True]",
"tests/test_feature.py::test_feature_stft_fields[512-channels_last-False]",
"tests/test_feature.py::test_feature_stft_fields[512-channels_last-True]",
"tests/test_feature.py::test_feature_stft_fields[512-channels_first-False]",
"tests/test_feature.py::test_feature_stft_fields[512-channels_first-True]",
"tests/test_feature.py::test_feature_stft_fields[1024-None-False]",
"tests/test_feature.py::test_feature_stft_fields[1024-None-True]",
"tests/test_feature.py::test_feature_stft_fields[1024-tf-False]",
"tests/test_feature.py::test_feature_stft_fields[1024-tf-True]",
"tests/test_feature.py::test_feature_stft_fields[1024-th-False]",
"tests/test_feature.py::test_feature_stft_fields[1024-th-True]",
"tests/test_feature.py::test_feature_stft_fields[1024-channels_last-False]",
"tests/test_feature.py::test_feature_stft_fields[1024-channels_last-True]",
"tests/test_feature.py::test_feature_stft_fields[1024-channels_first-False]",
"tests/test_feature.py::test_feature_stft_fields[1024-channels_first-True]",
"tests/test_feature.py::test_feature_stft_mag_fields[512-None]",
"tests/test_feature.py::test_feature_stft_mag_fields[512-tf]",
"tests/test_feature.py::test_feature_stft_mag_fields[512-th]",
"tests/test_feature.py::test_feature_stft_mag_fields[512-channels_last]",
"tests/test_feature.py::test_feature_stft_mag_fields[512-channels_first]",
"tests/test_feature.py::test_feature_stft_mag_fields[1024-None]",
"tests/test_feature.py::test_feature_stft_mag_fields[1024-tf]",
"tests/test_feature.py::test_feature_stft_mag_fields[1024-th]",
"tests/test_feature.py::test_feature_stft_mag_fields[1024-channels_last]",
"tests/test_feature.py::test_feature_stft_mag_fields[1024-channels_first]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[512-None]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[512-tf]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[512-th]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[512-channels_last]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[512-channels_first]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[1024-None]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[1024-tf]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[1024-th]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[1024-channels_last]",
"tests/test_feature.py::test_feature_stft_phasediff_fields[1024-channels_first]",
"tests/test_feature.py::test_feature_mel_fields[512-32-None]",
"tests/test_feature.py::test_feature_mel_fields[512-32-tf]",
"tests/test_feature.py::test_feature_mel_fields[512-32-th]",
"tests/test_feature.py::test_feature_mel_fields[512-32-channels_last]",
"tests/test_feature.py::test_feature_mel_fields[512-32-channels_first]",
"tests/test_feature.py::test_feature_mel_fields[512-128-None]",
"tests/test_feature.py::test_feature_mel_fields[512-128-tf]",
"tests/test_feature.py::test_feature_mel_fields[512-128-th]",
"tests/test_feature.py::test_feature_mel_fields[512-128-channels_last]",
"tests/test_feature.py::test_feature_mel_fields[512-128-channels_first]",
"tests/test_feature.py::test_feature_mel_fields[1024-32-None]",
"tests/test_feature.py::test_feature_mel_fields[1024-32-tf]",
"tests/test_feature.py::test_feature_mel_fields[1024-32-th]",
"tests/test_feature.py::test_feature_mel_fields[1024-32-channels_last]",
"tests/test_feature.py::test_feature_mel_fields[1024-32-channels_first]",
"tests/test_feature.py::test_feature_mel_fields[1024-128-None]",
"tests/test_feature.py::test_feature_mel_fields[1024-128-tf]",
"tests/test_feature.py::test_feature_mel_fields[1024-128-th]",
"tests/test_feature.py::test_feature_mel_fields[1024-128-channels_last]",
"tests/test_feature.py::test_feature_mel_fields[1024-128-channels_first]",
"tests/test_feature.py::test_feature_cqt_fields[1-1-None]",
"tests/test_feature.py::test_feature_cqt_fields[1-1-tf]",
"tests/test_feature.py::test_feature_cqt_fields[1-1-th]",
"tests/test_feature.py::test_feature_cqt_fields[1-1-channels_last]",
"tests/test_feature.py::test_feature_cqt_fields[1-1-channels_first]",
"tests/test_feature.py::test_feature_cqt_fields[1-4-None]",
"tests/test_feature.py::test_feature_cqt_fields[1-4-tf]",
"tests/test_feature.py::test_feature_cqt_fields[1-4-th]",
"tests/test_feature.py::test_feature_cqt_fields[1-4-channels_last]",
"tests/test_feature.py::test_feature_cqt_fields[1-4-channels_first]",
"tests/test_feature.py::test_feature_cqt_fields[3-1-None]",
"tests/test_feature.py::test_feature_cqt_fields[3-1-tf]",
"tests/test_feature.py::test_feature_cqt_fields[3-1-th]",
"tests/test_feature.py::test_feature_cqt_fields[3-1-channels_last]",
"tests/test_feature.py::test_feature_cqt_fields[3-1-channels_first]",
"tests/test_feature.py::test_feature_cqt_fields[3-4-None]",
"tests/test_feature.py::test_feature_cqt_fields[3-4-tf]",
"tests/test_feature.py::test_feature_cqt_fields[3-4-th]",
"tests/test_feature.py::test_feature_cqt_fields[3-4-channels_last]",
"tests/test_feature.py::test_feature_cqt_fields[3-4-channels_first]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-1-None]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-1-tf]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-1-th]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-1-channels_last]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-1-channels_first]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-4-None]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-4-tf]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-4-th]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-4-channels_last]",
"tests/test_feature.py::test_feature_cqtmag_fields[1-4-channels_first]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-1-None]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-1-tf]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-1-th]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-1-channels_last]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-1-channels_first]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-4-None]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-4-tf]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-4-th]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-4-channels_last]",
"tests/test_feature.py::test_feature_cqtmag_fields[3-4-channels_first]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-1-None]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-1-tf]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-1-th]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-1-channels_last]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-1-channels_first]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-4-None]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-4-tf]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-4-th]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-4-channels_last]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[1-4-channels_first]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-1-None]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-1-tf]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-1-th]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-1-channels_last]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-1-channels_first]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-4-None]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-4-tf]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-4-th]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-4-channels_last]",
"tests/test_feature.py::test_feature_cqtphasediff_fields[3-4-channels_first]",
"tests/test_feature.py::test_feature_tempogram_fields[192-None]",
"tests/test_feature.py::test_feature_tempogram_fields[192-tf]",
"tests/test_feature.py::test_feature_tempogram_fields[192-th]",
"tests/test_feature.py::test_feature_tempogram_fields[192-channels_last]",
"tests/test_feature.py::test_feature_tempogram_fields[192-channels_first]",
"tests/test_feature.py::test_feature_tempogram_fields[384-None]",
"tests/test_feature.py::test_feature_tempogram_fields[384-tf]",
"tests/test_feature.py::test_feature_tempogram_fields[384-th]",
"tests/test_feature.py::test_feature_tempogram_fields[384-channels_last]",
"tests/test_feature.py::test_feature_tempogram_fields[384-channels_first]",
"tests/test_feature.py::test_feature_temposcale_fields[192-16-None]",
"tests/test_feature.py::test_feature_temposcale_fields[192-16-tf]",
"tests/test_feature.py::test_feature_temposcale_fields[192-16-th]",
"tests/test_feature.py::test_feature_temposcale_fields[192-16-channels_last]",
"tests/test_feature.py::test_feature_temposcale_fields[192-16-channels_first]",
"tests/test_feature.py::test_feature_temposcale_fields[192-128-None]",
"tests/test_feature.py::test_feature_temposcale_fields[192-128-tf]",
"tests/test_feature.py::test_feature_temposcale_fields[192-128-th]",
"tests/test_feature.py::test_feature_temposcale_fields[192-128-channels_last]",
"tests/test_feature.py::test_feature_temposcale_fields[192-128-channels_first]",
"tests/test_feature.py::test_feature_temposcale_fields[384-16-None]",
"tests/test_feature.py::test_feature_temposcale_fields[384-16-tf]",
"tests/test_feature.py::test_feature_temposcale_fields[384-16-th]",
"tests/test_feature.py::test_feature_temposcale_fields[384-16-channels_last]",
"tests/test_feature.py::test_feature_temposcale_fields[384-16-channels_first]",
"tests/test_feature.py::test_feature_temposcale_fields[384-128-None]",
"tests/test_feature.py::test_feature_temposcale_fields[384-128-tf]",
"tests/test_feature.py::test_feature_temposcale_fields[384-128-th]",
"tests/test_feature.py::test_feature_temposcale_fields[384-128-channels_last]",
"tests/test_feature.py::test_feature_temposcale_fields[384-128-channels_first]"
]
| []
| ISC License | 1,146 | [
"pumpp/feature/cqt.py",
"pumpp/feature/base.py",
"pumpp/feature/__init__.py"
]
| [
"pumpp/feature/cqt.py",
"pumpp/feature/base.py",
"pumpp/feature/__init__.py"
]
|
|
Azure__azure-cli-2751 | 678b940f29230f53fd7cbec7211842e99658ed3d | 2017-04-04 17:41:26 | eb12ac454cbe1ddb59c86cdf2045e1912660e750 | codecov-io: # [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=h1) Report
> Merging [#2751](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=desc) into [master](https://codecov.io/gh/Azure/azure-cli/commit/1ccca6b1c767efed9068f41446e2689e1b2f8ecf?src=pr&el=desc) will **increase** coverage by `0.02%`.
> The diff coverage is `46.51%`.
```diff
@@ Coverage Diff @@
## master #2751 +/- ##
==========================================
+ Coverage 61.15% 61.17% +0.02%
==========================================
Files 480 480
Lines 25743 25762 +19
Branches 3898 3901 +3
==========================================
+ Hits 15742 15759 +17
- Misses 9068 9077 +9
+ Partials 933 926 -7
```
| [Impacted Files](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [...twork/azure/cli/command\_modules/network/\_params.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktbmV0d29yay9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL25ldHdvcmsvX3BhcmFtcy5weQ==) | `92.49% <100%> (-0.04%)` | :arrow_down: |
| [...network/azure/cli/command\_modules/network/\_help.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktbmV0d29yay9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL25ldHdvcmsvX2hlbHAucHk=) | `100% <100%> (ø)` | :arrow_up: |
| [...etwork/azure/cli/command\_modules/network/custom.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktbmV0d29yay9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL25ldHdvcmsvY3VzdG9tLnB5) | `62.46% <21.05%> (-0.37%)` | :arrow_down: |
| [...k/azure/cli/command\_modules/network/\_validators.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktbmV0d29yay9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL25ldHdvcmsvX3ZhbGlkYXRvcnMucHk=) | `63.97% <60%> (+0.34%)` | :arrow_up: |
| [...-cli-role/azure/cli/command\_modules/role/custom.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktcm9sZS9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL3JvbGUvY3VzdG9tLnB5) | `19.28% <0%> (ø)` | :arrow_up: |
| [...work/azure/cli/command\_modules/network/commands.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktbmV0d29yay9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL25ldHdvcmsvY29tbWFuZHMucHk=) | `98.98% <0%> (ø)` | :arrow_up: |
| [...dback/azure/cli/command\_modules/feedback/custom.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktZmVlZGJhY2svYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy9mZWVkYmFjay9jdXN0b20ucHk=) | `34.69% <0%> (ø)` | :arrow_up: |
| [src/azure-cli-core/azure/cli/core/util.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2F6dXJlLWNsaS1jb3JlL2F6dXJlL2NsaS9jb3JlL3V0aWwucHk=) | `68.99% <0%> (ø)` | :arrow_up: |
| [...e/cli/command\_modules/network/\_template\_builder.py](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktbmV0d29yay9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL25ldHdvcmsvX3RlbXBsYXRlX2J1aWxkZXIucHk=) | `86.17% <0%> (+2.12%)` | :arrow_up: |
| ... and [1 more](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=tree-more) | |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=footer). Last update [1ccca6b...e2d2234](https://codecov.io/gh/Azure/azure-cli/pull/2751?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments).
tjprescott: @derekbekoe
```
Command
az network vnet-gateway create: Create a virtual network gateway.
Arguments
--name -n [Required]: Name of the VNet gateway.
--public-ip-addresses [Required]: Specify a single public IP (name or ID) for an active-standby
gateway. Specify two public IPs for an active-active gateway.
--resource-group -g [Required]: Name of resource group. You can configure the default group
using 'az configure --defaults group=<name>'.
--vnet [Required]: Name or ID of an existing virtual network which has a subnet
named 'GatewaySubnet'.
--address-prefixes : Space separated list of address prefixes to associate with the
VNet gateway.
--gateway-type : The gateway type. Allowed values: ExpressRoute, Vpn.
Default: Vpn.
--location -l : Location. You can configure the default location using 'az
configure --defaults location=<location>'.
--no-wait : Do not wait for the long running operation to finish.
--sku : VNet gateway SKU. Allowed values: Basic, HighPerformance,
Standard, UltraPerformance. Default: Basic.
--tags : Space separated tags in 'key[=value]' format. Use "" to clear
existing tags.
--vpn-type : VPN routing type. Allowed values: PolicyBased, RouteBased.
Default: RouteBased.
BGP Peering Arguments
--asn : Autonomous System Number to use for the BGP settings.
--bgp-peering-address : IP address to use for BGP peering.
--peer-weight : Weight (0-100) added to routes learned through BGP peering.
```
The relevant text for update is the same.
derekbekoe: @tjprescott Looking at the help for `--public-ip-addresses`, how do I know how to specify 2 ip addresses? Maybe say that it should be space separated?
tjprescott: @derekbekoe good point. It used to say space-separated. I'll add that back in. | diff --git a/src/command_modules/azure-cli-network/HISTORY.rst b/src/command_modules/azure-cli-network/HISTORY.rst
index ab972d502..b7c518c89 100644
--- a/src/command_modules/azure-cli-network/HISTORY.rst
+++ b/src/command_modules/azure-cli-network/HISTORY.rst
@@ -5,7 +5,7 @@ Release History
unreleased
++++++++++++++++++
-
+* Add support for active-active VNet gateways
* Remove nulls values from output of `network vpn-connection list/show` commands.
2.0.2 (2017-04-03)
diff --git a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py
index ce2b253d2..d8e754bbe 100644
--- a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py
+++ b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py
@@ -1764,6 +1764,11 @@ helps['network vnet-gateway update'] = """
type: command
short-summary: Update a virtual network gateway.
"""
+
+helps['network vnet-gateway wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the virtual network gateway is met.
+"""
#endregion
# region VNet Gateway Revoke Cert
@@ -1788,7 +1793,7 @@ helps['network vnet-gateway revoked-cert delete'] = """
# region VNet Gateway Root Cert
helps['network vnet-gateway root-cert'] = """
type: group
- short-summary: Manage root certificates for a virtuak network gateway.
+ short-summary: Manage root certificates for a virtual network gateway.
"""
helps['network vnet-gateway root-cert create'] = """
diff --git a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_params.py b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_params.py
index 55b665ef8..fa33e4fe4 100644
--- a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_params.py
+++ b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_params.py
@@ -35,14 +35,14 @@ from azure.cli.command_modules.network._validators import \
process_public_ip_create_namespace, validate_private_ip_address,
process_lb_frontend_ip_namespace, process_local_gateway_create_namespace,
process_tm_endpoint_create_namespace, process_vnet_create_namespace,
- process_vnet_gateway_create_namespace, process_vpn_connection_create_namespace,
+ process_vnet_gateway_create_namespace, process_vnet_gateway_update_namespace,
+ process_vpn_connection_create_namespace,
process_ag_ssl_policy_set_namespace, process_route_table_create_namespace,
validate_auth_cert, validate_cert, validate_inbound_nat_rule_id_list,
validate_address_pool_id_list, validate_inbound_nat_rule_name_or_id,
validate_address_pool_name_or_id, validate_servers, load_cert_file, validate_metadata,
validate_peering_type, validate_dns_record_type,
- get_public_ip_validator, get_nsg_validator, get_subnet_validator,
- get_virtual_network_validator)
+ get_public_ip_validator, get_nsg_validator, get_subnet_validator)
from azure.mgmt.network.models import ApplicationGatewaySslProtocol
from azure.cli.command_modules.network.custom import list_traffic_manager_endpoints
@@ -480,17 +480,15 @@ register_cli_argument('network vnet-gateway', 'sku', help='VNet gateway SKU.', *
register_cli_argument('network vnet-gateway', 'vpn_type', help='VPN routing type.', **enum_choice_list(VpnType))
register_cli_argument('network vnet-gateway', 'bgp_peering_address', arg_group='BGP Peering', help='IP address to use for BGP peering.')
register_cli_argument('network vnet-gateway', 'address_prefixes', help='Space separated list of address prefixes to associate with the VNet gateway.', nargs='+')
+register_cli_argument('network vnet-gateway', 'public_ip_address', options_list=['--public-ip-addresses'], nargs='+', help='Specify a single public IP (name or ID) for an active-standby gateway. Specify two space-separated public IPs for an active-active gateway.', completer=get_resource_name_completion_list('Microsoft.Network/publicIPAddresses'))
+
register_cli_argument('network vnet-gateway create', 'asn', validator=process_vnet_gateway_create_namespace)
register_cli_argument('network vnet-gateway update', 'enable_bgp', help='Enable BGP (Border Gateway Protocol)', arg_group='BGP Peering', **enum_choice_list(['true', 'false']))
-register_cli_argument('network vnet-gateway update', 'public_ip_address', help='Name or ID of a public IP address.', validator=get_public_ip_validator())
-register_cli_argument('network vnet-gateway update', 'virtual_network', virtual_network_name_type, options_list=('--vnet',), help="Name or ID of a virtual network that contains a subnet named 'GatewaySubnet'.", validator=get_virtual_network_validator())
-
-public_ip_help = get_folded_parameter_help_string('public IP address')
-register_cli_argument('network vnet-gateway create', 'public_ip_address', help=public_ip_help, completer=get_resource_name_completion_list('Microsoft.Network/publicIPAddresses'), validator=get_public_ip_validator())
+register_cli_argument('network vnet-gateway update', 'virtual_network', virtual_network_name_type, options_list=('--vnet',), help="Name or ID of a virtual network that contains a subnet named 'GatewaySubnet'.", validator=process_vnet_gateway_update_namespace)
vnet_help = "Name or ID of an existing virtual network which has a subnet named 'GatewaySubnet'."
-register_cli_argument('network vnet-gateway create', 'virtual_network', options_list=('--vnet',), help=vnet_help, validator=get_virtual_network_validator())
+register_cli_argument('network vnet-gateway create', 'virtual_network', options_list=('--vnet',), help=vnet_help)
register_cli_argument('network vnet-gateway root-cert create', 'public_cert_data', help='Base64 contents of the root certificate file or file path.', type=file_type, completer=FilesCompleter(), validator=load_cert_file('public_cert_data'))
register_cli_argument('network vnet-gateway root-cert create', 'cert_name', help='Root certificate name', options_list=('--name', '-n'))
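For reference, the `--public-ip-addresses` registration above uses `nargs='+'`, which is what enables the space-separated form raised in the review discussion: a hypothetical `--public-ip-addresses MyPip1 MyPip2` yields a two-element list, and the list length later decides the gateway mode. Below is a minimal standalone argparse sketch of that behaviour; the option values are made-up names and this is not azure-cli's own registration machinery.

```python
# Standalone sketch: an nargs='+' option collects one or more space-separated
# values into a list; the length of that list is what later decides the
# gateway mode (1 value -> active-standby, 2 values -> active-active).
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--public-ip-addresses', nargs='+', required=True)

standby = parser.parse_args(['--public-ip-addresses', 'MyPip1'])
active = parser.parse_args(['--public-ip-addresses', 'MyPip1', 'MyPip2'])

print(standby.public_ip_addresses)           # ['MyPip1']
print(active.public_ip_addresses)            # ['MyPip1', 'MyPip2']
print(len(active.public_ip_addresses) == 2)  # True -> active-active
```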
diff --git a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_validators.py b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_validators.py
index 3d48e6e30..22c3c2309 100644
--- a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_validators.py
+++ b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/_validators.py
@@ -174,15 +174,24 @@ def get_public_ip_validator(has_type_field=False, allow_none=False, allow_new=Fa
for an existing name or ID with no ARM-required -type parameter. """
def simple_validator(namespace):
if namespace.public_ip_address:
- # determine if public_ip_address is name or ID
- is_id = is_valid_resource_id(namespace.public_ip_address)
- if not is_id:
- namespace.public_ip_address = resource_id(
+ is_list = isinstance(namespace.public_ip_address, list)
+
+ def _validate_name_or_id(public_ip):
+ # determine if public_ip_address is name or ID
+ is_id = is_valid_resource_id(public_ip)
+ return public_ip if is_id else resource_id(
subscription=get_subscription_id(),
resource_group=namespace.resource_group_name,
namespace='Microsoft.Network',
type='publicIPAddresses',
- name=namespace.public_ip_address)
+ name=public_ip)
+
+ if is_list:
+ for i, public_ip in enumerate(namespace.public_ip_address):
+ namespace.public_ip_address[i] = _validate_name_or_id(public_ip)
+ else:
+ namespace.public_ip_address = _validate_name_or_id(namespace.public_ip_address)
+
def complex_validator_with_type(namespace):
get_folded_parameter_validator(
@@ -521,11 +530,28 @@ def process_vnet_create_namespace(namespace):
def process_vnet_gateway_create_namespace(namespace):
ns = namespace
get_default_location_from_resource_group(ns)
+ get_virtual_network_validator()(ns)
+
+ get_public_ip_validator()(ns)
+ public_ip_count = len(ns.public_ip_address or [])
+ if public_ip_count > 2:
+ raise CLIError('Specify a single public IP to create an active-standby gateway or two '
+ 'public IPs to create an active-active gateway.')
+
enable_bgp = any([ns.asn, ns.bgp_peering_address, ns.peer_weight])
if enable_bgp and not ns.asn:
raise ValueError(
'incorrect usage: --asn ASN [--peer-weight WEIGHT --bgp-peering-address IP ]')
+def process_vnet_gateway_update_namespace(namespace):
+ ns = namespace
+ get_virtual_network_validator()(ns)
+ get_public_ip_validator()(ns)
+ public_ip_count = len(ns.public_ip_address or [])
+ if public_ip_count > 2:
+ raise CLIError('Specify a single public IP to create an active-standby gateway or two '
+ 'public IPs to create an active-active gateway.')
+
def process_vpn_connection_create_namespace(namespace):
get_default_location_from_resource_group(namespace)
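A simplified, dependency-free sketch of what the validator changes above amount to: each supplied value is accepted as either a bare public IP name or a full resource ID, bare names are expanded to IDs, and more than two values is rejected. The helper names, subscription and resource group below are placeholders; the real validator uses the `is_valid_resource_id` / `resource_id` helpers shown in the diff and raises a `CLIError` rather than a `ValueError`.

```python
# Illustrative stand-ins for the helpers used by get_public_ip_validator().
SUBSCRIPTION = '00000000-0000-0000-0000-000000000000'   # placeholder
RESOURCE_GROUP = 'example-rg'                            # placeholder


def _looks_like_resource_id(value):
    # Crude check standing in for is_valid_resource_id().
    return value.lower().startswith('/subscriptions/')


def _to_public_ip_id(name_or_id):
    if _looks_like_resource_id(name_or_id):
        return name_or_id
    return ('/subscriptions/{}/resourceGroups/{}/providers/'
            'Microsoft.Network/publicIPAddresses/{}').format(
                SUBSCRIPTION, RESOURCE_GROUP, name_or_id)


def normalize_public_ips(public_ips):
    if len(public_ips) > 2:
        raise ValueError('Specify a single public IP for an active-standby gateway '
                         'or two public IPs for an active-active gateway.')
    return [_to_public_ip_id(ip) for ip in public_ips]


print(normalize_public_ips(['MyPip1']))            # one ID  -> active-standby
print(normalize_public_ips(['MyPip1', 'MyPip2']))  # two IDs -> active-active
```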
diff --git a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
index aa6ce7921..e1a8ca19d 100644
--- a/src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
+++ b/src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
@@ -1333,13 +1333,16 @@ def create_vnet_gateway(resource_group_name, virtual_network_gateway_name, publi
client = _network_client_factory().virtual_network_gateways
subnet = virtual_network + '/subnets/GatewaySubnet'
- ip_configuration = VirtualNetworkGatewayIPConfiguration(
- SubResource(subnet),
- SubResource(public_ip_address),
- private_ip_allocation_method='Dynamic', name='vnetGatewayConfig')
+ active_active = len(public_ip_address) == 2
vnet_gateway = VirtualNetworkGateway(
- [ip_configuration], gateway_type, vpn_type, location=location, tags=tags,
- sku=VirtualNetworkGatewaySku(sku, sku))
+ [], gateway_type, vpn_type, location=location, tags=tags,
+ sku=VirtualNetworkGatewaySku(sku, sku), active_active=active_active)
+ for i, public_ip in enumerate(public_ip_address):
+ ip_configuration = VirtualNetworkGatewayIPConfiguration(
+ SubResource(subnet),
+ SubResource(public_ip),
+ private_ip_allocation_method='Dynamic', name='vnetGatewayConfig{}'.format(i))
+ vnet_gateway.ip_configurations.append(ip_configuration)
if asn or bgp_peering_address or peer_weight:
vnet_gateway.enable_bgp = True
vnet_gateway.bgp_settings = BgpSettings(asn, bgp_peering_address, peer_weight)
@@ -1354,6 +1357,7 @@ def update_vnet_gateway(instance, address_prefixes=None, sku=None, vpn_type=None
public_ip_address=None, gateway_type=None, enable_bgp=None,
asn=None, bgp_peering_address=None, peer_weight=None, virtual_network=None,
tags=None):
+ from azure.mgmt.network.models import VirtualNetworkGatewayIPConfiguration
if address_prefixes is not None:
if not instance.vpn_client_configuration:
@@ -1376,8 +1380,28 @@ def update_vnet_gateway(instance, address_prefixes=None, sku=None, vpn_type=None
if tags is not None:
instance.tags = tags
+ subnet_id = '{}/subnets/GatewaySubnet'.format(virtual_network) if virtual_network else \
+ instance.ip_configurations[0].subnet.id
+ if virtual_network is not None:
+ for config in instance.ip_configurations:
+ config.subnet.id = subnet_id
+
if public_ip_address is not None:
- instance.ip_configurations[0].public_ip_address.id = public_ip_address
+ instance.ip_configurations = []
+ for i, public_ip in enumerate(public_ip_address):
+ ip_configuration = VirtualNetworkGatewayIPConfiguration(
+ SubResource(subnet_id),
+ SubResource(public_ip),
+ private_ip_allocation_method='Dynamic', name='vnetGatewayConfig{}'.format(i))
+ instance.ip_configurations.append(ip_configuration)
+
+ # Update active-active/active-standby status
+ active_active = len(public_ip_address) == 2
+ if instance.active_active and not active_active:
+ logger.info('Placing gateway in active-standby mode.')
+ elif not instance.active_active and active_active:
+ logger.info('Placing gateway in active-active mode.')
+ instance.active_active = active_active
if gateway_type is not None:
instance.gateway_type = gateway_type
@@ -1387,10 +1411,6 @@ def update_vnet_gateway(instance, address_prefixes=None, sku=None, vpn_type=None
_validate_bgp_peering(instance, asn, bgp_peering_address, peer_weight)
- if virtual_network is not None:
- instance.ip_configurations[0].subnet.id = \
- '{}/subnets/GatewaySubnet'.format(virtual_network)
-
return instance
# endregion
@@ -1563,7 +1583,7 @@ def create_local_gateway(resource_group_name, local_network_gateway_name, gatewa
from azure.mgmt.network.models import LocalNetworkGateway, BgpSettings
client = _network_client_factory().local_network_gateways
local_gateway = LocalNetworkGateway(
- local_address_prefix or [], location=location, tags=tags,
+ AddressSpace(local_address_prefix or []), location=location, tags=tags,
gateway_ip_address=gateway_ip_address)
if bgp_peering_address or asn or peer_weight:
local_gateway.bgp_settings = BgpSettings(asn, bgp_peering_address, peer_weight)
| [Network] VNET gateway should support configuration of active-active gateways
This is a new feature that was announced at Ignite last year. Please refer to the PowerShell cmdlet documentation or get in touch with me.
- [x] Active-Active Cross Premise
- [x] Active-Active VNet-to-VNet
- [x] Active-Standby to Active-Active
- [x] Active-Active to Active-Standby | Azure/azure-cli | diff --git a/src/command_modules/azure-cli-network/tests/recordings/test_network_active_active_cross_premise_connection.yaml b/src/command_modules/azure-cli-network/tests/recordings/test_network_active_active_cross_premise_connection.yaml
new file mode 100644
index 000000000..b149b2394
--- /dev/null
+++ b/src/command_modules/azure-cli-network/tests/recordings/test_network_active_active_cross_premise_connection.yaml
@@ -0,0 +1,4359 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b1d450c-1643-11e7-b555-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection","name":"cli_test_active_active_cross_premise_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:52:19 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['284']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"location": "westus", "properties": {"ipConfigurations": [{"properties":
+ {"subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet"},
+ "publicIPAddress": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/publicIPAddresses/gwip1"},
+ "privateIPAllocationMethod": "Dynamic"}, "name": "vnetGatewayConfig0"}, {"properties":
+ {"subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet"},
+ "publicIPAddress": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/publicIPAddresses/gwip2"},
+ "privateIPAllocationMethod": "Dynamic"}, "name": "vnetGatewayConfig1"}], "vpnType":
+ "RouteBased", "bgpSettings": {"asn": 65010}, "activeActive": true, "sku": {"tier":
+ "HighPerformance", "name": "HighPerformance"}, "enableBgp": true, "gatewayType":
+ "Vpn"}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1252']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"gw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1\"\
+ ,\r\n \"etag\": \"W/\\\"510c3035-8b86-4416-8536-1d93ffce512b\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"15632be1-08e9-4994-93d3-4072c5ffce4f\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"510c3035-8b86-4416-8536-1d93ffce512b\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/publicIPAddresses/gwip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"510c3035-8b86-4416-8536-1d93ffce512b\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/publicIPAddresses/gwip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"vpnClientConfiguration\": {\r\n \"vpnClientRootCertificates\"\
+ : [],\r\n \"vpnClientRevokedCertificates\": []\r\n },\r\n \"bgpSettings\"\
+ : {\r\n \"asn\": 65010,\r\n \"peerWeight\": 0\r\n }\r\n }\r\n\
+ }"}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['2863']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:52:20 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:52:31 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:52:41 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:52:51 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:53:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:53:12 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:53:21 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:53:32 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:53:43 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:53:52 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:54:03 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:54:14 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:54:23 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:54:34 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:54:44 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:54:54 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:55:05 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:55:16 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:55:26 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:55:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:55:48 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:55:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:56:08 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:56:18 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:56:28 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:56:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:56:48 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:56:59 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:57:09 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:57:19 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:57:31 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:57:41 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:57:50 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:58:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:58:12 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:58:23 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:58:33 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:58:43 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:58:55 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:59:05 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:59:15 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:59:26 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:59:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:59:46 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 18:59:57 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:00:07 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:00:16 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:00:27 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:00:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:00:47 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:00:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:01:08 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:01:18 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:01:28 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:01:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:01:49 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:02:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:02:10 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:02:20 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:02:31 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:02:42 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:02:52 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:03:03 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:03:13 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:03:24 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:03:34 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:03:46 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:03:56 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:04:07 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:04:17 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:04:27 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:04:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:04:47 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:04:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:05:08 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:05:18 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:05:29 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:05:40 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:05:49 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:05:59 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:06:10 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:06:21 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:06:32 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:06:42 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:06:53 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:07:03 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:07:14 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:07:24 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:07:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:07:45 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:07:55 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:08:05 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:08:16 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:08:26 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:08:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:08:47 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:08:57 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:09:07 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:09:17 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:09:27 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:09:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:09:48 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:09:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:10:09 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:10:18 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:10:29 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:10:39 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:10:50 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:11:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:11:10 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:11:21 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:11:31 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:11:41 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:11:52 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:12:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:12:12 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:12:22 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:12:33 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:12:43 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:12:54 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:13:04 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:13:15 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:13:25 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:13:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:13:47 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:13:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:14:08 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:14:19 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:14:29 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:14:40 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:14:50 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:11 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"InProgress\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:21 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['30']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/westus/operations/941ce25e-3871-4045-801b-2d998751f41c?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"Succeeded\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['29']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2b2e9d8a-1643-11e7-b51e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"gw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1\"\
+ ,\r\n \"etag\": \"W/\\\"6bf49958-ad54-44fc-876d-3dcf789e525b\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"resourceGuid\": \"15632be1-08e9-4994-93d3-4072c5ffce4f\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"6bf49958-ad54-44fc-876d-3dcf789e525b\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/publicIPAddresses/gwip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"6bf49958-ad54-44fc-876d-3dcf789e525b\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/publicIPAddresses/gwip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"bgpPeeringAddress\"\
+ : \"10.12.255.4,10.12.255.5\",\r\n \"peerWeight\": 0\r\n }\r\n }\r\
+ \n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2798']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"location": "eastus", "properties": {"gatewayIpAddress": "131.107.72.22",
+ "localNetworkAddressSpace": {"addressPrefixes": ["10.52.255.253/32"]}, "bgpSettings":
+ {"asn": 65050, "bgpPeeringAddress": "10.52.255.253"}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['215']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6b591ed2-1646-11e7-993b-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw2?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"lgw2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw2\"\
+ ,\r\n \"etag\": \"W/\\\"e3d668f5-a20e-44b7-b7fa-38cfbe24ecb2\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/localNetworkGateways\",\r\n \"location\"\
+ : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"424519dd-2add-4dde-a6f5-e2347b51dfe9\",\r\n \
+ \ \"localNetworkAddressSpace\": {\r\n \"addressPrefixes\": [\r\n \
+ \ \"10.52.255.253/32\"\r\n ]\r\n },\r\n \"gatewayIpAddress\"\
+ : \"131.107.72.22\",\r\n \"bgpSettings\": {\r\n \"asn\": 65050,\r\n\
+ \ \"bgpPeeringAddress\": \"10.52.255.253\",\r\n \"peerWeight\":\
+ \ 0\r\n }\r\n }\r\n}"}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/eastus/operations/c000c2be-3bef-497c-873b-4189a9b6c4ad?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['736']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6b591ed2-1646-11e7-993b-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/c000c2be-3bef-497c-873b-4189a9b6c4ad?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"Succeeded\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:47 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['29']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6b591ed2-1646-11e7-993b-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw2?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"lgw2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw2\"\
+ ,\r\n \"etag\": \"W/\\\"0454970d-6377-4438-8279-562aa7e18fbe\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/localNetworkGateways\",\r\n \"location\"\
+ : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"resourceGuid\": \"424519dd-2add-4dde-a6f5-e2347b51dfe9\",\r\n \
+ \ \"localNetworkAddressSpace\": {\r\n \"addressPrefixes\": [\r\n \
+ \ \"10.52.255.253/32\"\r\n ]\r\n },\r\n \"gatewayIpAddress\"\
+ : \"131.107.72.22\",\r\n \"bgpSettings\": {\r\n \"asn\": 65050,\r\n\
+ \ \"bgpPeeringAddress\": \"10.52.255.253\",\r\n \"peerWeight\":\
+ \ 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:48 GMT']
+ ETag: [W/"0454970d-6377-4438-8279-562aa7e18fbe"]
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['737']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [73750026-1646-11e7-a7a4-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection","name":"cli_test_active_active_cross_premise_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:49 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['284']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"properties": {"mode": "Incremental", "template": {"outputs": {"resource":
+ {"value": "[reference(''Vnet1toSite5_1'')]", "type": "object"}}, "$schema":
+ "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0", "resources": [{"location": "westus", "properties":
+ {"virtualNetworkGateway1": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1"},
+ "routingWeight": 10, "connectionType": "IPSec", "authorizationKey": null, "localNetworkGateway2":
+ {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw2"},
+ "enableBgp": true, "sharedKey": "abc123"}, "apiVersion": "2015-06-15", "tags":
+ {}, "dependsOn": [], "type": "Microsoft.Network/connections", "name": "Vnet1toSite5_1"}],
+ "variables": {}, "parameters": {}}, "parameters": {}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1034']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [738aab4a-1646-11e7-9c76-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_ggIDURKxCVkgx93JBNCfeNzIzqHwyArL","name":"vpn_connection_deploy_ggIDURKxCVkgx93JBNCfeNzIzqHwyArL","properties":{"templateHash":"17600274930576475001","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-03-31T19:15:51.1136755Z","duration":"PT0.4825926S","correlationId":"73a2916e-424e-4a47-a67a-a5279c1ecaf0","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[]}}'}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_ggIDURKxCVkgx93JBNCfeNzIzqHwyArL/operationStatuses/08587106191348465278?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['673']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:15:50 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1197']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [738aab4a-1646-11e7-9c76-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587106191348465278?api-version=2016-09-01
+ response:
+ body: {string: '{"status":"Succeeded"}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:20 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['22']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [738aab4a-1646-11e7-9c76-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_ggIDURKxCVkgx93JBNCfeNzIzqHwyArL","name":"vpn_connection_deploy_ggIDURKxCVkgx93JBNCfeNzIzqHwyArL","properties":{"templateHash":"17600274930576475001","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-03-31T19:16:18.5897816Z","duration":"PT27.9586987S","correlationId":"73a2916e-424e-4a47-a67a-a5279c1ecaf0","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[],"outputs":{"resource":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"f3e63a38-9d31-4242-bd5c-ba5af1ac1873","virtualNetworkGateway1":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1"},"localNetworkGateway2":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw2"},"connectionType":"IPsec","routingWeight":10,"sharedKey":"abc123","enableBgp":true,"connectionStatus":"Unknown","ingressBytesTransferred":0,"egressBytesTransferred":0}}},"outputResources":[{"id":"Microsoft.Network/connections/Vnet1toSite5_1"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:21 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['1469']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"location": "eastus", "properties": {"gatewayIpAddress": "131.107.72.23",
+ "localNetworkAddressSpace": {"addressPrefixes": ["10.52.255.254/32"]}, "bgpSettings":
+ {"asn": 65050, "bgpPeeringAddress": "10.52.255.254"}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['215']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [86fbd8ae-1646-11e7-bb9a-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw3?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"lgw3\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw3\"\
+ ,\r\n \"etag\": \"W/\\\"9ff7983f-b829-40e6-8f85-f936134395e6\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/localNetworkGateways\",\r\n \"location\"\
+ : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"68b9dccc-9f89-4313-8b7c-bfb2bc47cc7f\",\r\n \
+ \ \"localNetworkAddressSpace\": {\r\n \"addressPrefixes\": [\r\n \
+ \ \"10.52.255.254/32\"\r\n ]\r\n },\r\n \"gatewayIpAddress\"\
+ : \"131.107.72.23\",\r\n \"bgpSettings\": {\r\n \"asn\": 65050,\r\n\
+ \ \"bgpPeeringAddress\": \"10.52.255.254\",\r\n \"peerWeight\":\
+ \ 0\r\n }\r\n }\r\n}"}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/eastus/operations/0237915f-68ad-452e-b7cc-22b9b0ac0d02?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['736']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:23 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [86fbd8ae-1646-11e7-bb9a-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Network/locations/eastus/operations/0237915f-68ad-452e-b7cc-22b9b0ac0d02?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"status\": \"Succeeded\"\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:33 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['29']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [86fbd8ae-1646-11e7-bb9a-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw3?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"lgw3\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw3\"\
+ ,\r\n \"etag\": \"W/\\\"5177222f-cf52-4fd7-94b4-c65554668cbf\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/localNetworkGateways\",\r\n \"location\"\
+ : \"eastus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"resourceGuid\": \"68b9dccc-9f89-4313-8b7c-bfb2bc47cc7f\",\r\n \
+ \ \"localNetworkAddressSpace\": {\r\n \"addressPrefixes\": [\r\n \
+ \ \"10.52.255.254/32\"\r\n ]\r\n },\r\n \"gatewayIpAddress\"\
+ : \"131.107.72.23\",\r\n \"bgpSettings\": {\r\n \"asn\": 65050,\r\n\
+ \ \"bgpPeeringAddress\": \"10.52.255.254\",\r\n \"peerWeight\":\
+ \ 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:34 GMT']
+ ETag: [W/"5177222f-cf52-4fd7-94b4-c65554668cbf"]
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['737']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [8f19d912-1646-11e7-b148-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection","name":"cli_test_active_active_cross_premise_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['284']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"properties": {"mode": "Incremental", "template": {"outputs": {"resource":
+ {"value": "[reference(''Vnet1toSite5_2'')]", "type": "object"}}, "$schema":
+ "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0", "resources": [{"location": "westus", "properties":
+ {"virtualNetworkGateway1": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1"},
+ "routingWeight": 10, "connectionType": "IPSec", "authorizationKey": null, "localNetworkGateway2":
+ {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw3"},
+ "enableBgp": true, "sharedKey": "abc123"}, "apiVersion": "2015-06-15", "tags":
+ {}, "dependsOn": [], "type": "Microsoft.Network/connections", "name": "Vnet1toSite5_2"}],
+ "variables": {}, "parameters": {}}, "parameters": {}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1034']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [8f4ae37a-1646-11e7-a4bb-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_xXBbcjdOc5ia5NtxottD6l9vsakCw1j4","name":"vpn_connection_deploy_xXBbcjdOc5ia5NtxottD6l9vsakCw1j4","properties":{"templateHash":"14690740186505567652","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-03-31T19:16:38.095127Z","duration":"PT0.7400646S","correlationId":"15cf548c-6484-4013-ac87-9e300bf2757f","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[]}}'}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_xXBbcjdOc5ia5NtxottD6l9vsakCw1j4/operationStatuses/08587106190881225560?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['672']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:16:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [8f4ae37a-1646-11e7-a4bb-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587106190881225560?api-version=2016-09-01
+ response:
+ body: {string: '{"status":"Running"}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:17:07 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['20']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [8f4ae37a-1646-11e7-a4bb-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587106190881225560?api-version=2016-09-01
+ response:
+ body: {string: '{"status":"Succeeded"}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:17:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['22']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [8f4ae37a-1646-11e7-a4bb-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_xXBbcjdOc5ia5NtxottD6l9vsakCw1j4","name":"vpn_connection_deploy_xXBbcjdOc5ia5NtxottD6l9vsakCw1j4","properties":{"templateHash":"14690740186505567652","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-03-31T19:17:09.4950858Z","duration":"PT32.1400234S","correlationId":"15cf548c-6484-4013-ac87-9e300bf2757f","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[],"outputs":{"resource":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"bb903a0e-e341-4f3b-824e-86c2f4e63abb","virtualNetworkGateway1":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/virtualNetworkGateways/gw1"},"localNetworkGateway2":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_cross_premise_connection/providers/Microsoft.Network/localNetworkGateways/lgw3"},"connectionType":"IPsec","routingWeight":10,"sharedKey":"abc123","enableBgp":true,"connectionStatus":"Unknown","ingressBytesTransferred":0,"egressBytesTransferred":0}}},"outputResources":[{"id":"Microsoft.Network/connections/Vnet1toSite5_2"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 19:17:39 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['1469']
+ status: {code: 200, message: OK}
+version: 1
diff --git a/src/command_modules/azure-cli-network/tests/recordings/test_network_active_active_vnet_vnet_connection.yaml b/src/command_modules/azure-cli-network/tests/recordings/test_network_active_active_vnet_vnet_connection.yaml
new file mode 100644
index 000000000..434934460
--- /dev/null
+++ b/src/command_modules/azure-cli-network/tests/recordings/test_network_active_active_vnet_vnet_connection.yaml
@@ -0,0 +1,2886 @@
+interactions:
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [1be8e480-164e-11e7-b9cb-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection","name":"cli_test_active_active_vnet_vnet_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:10:39 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['276']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"location": "westus", "properties": {"enableBgp": true, "vpnType": "RouteBased",
+ "activeActive": true, "bgpSettings": {"asn": 65010}, "ipConfigurations": [{"properties":
+ {"publicIPAddress": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1"},
+ "subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet"},
+ "privateIPAllocationMethod": "Dynamic"}, "name": "vnetGatewayConfig0"}, {"properties":
+ {"publicIPAddress": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2"},
+ "subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet"},
+ "privateIPAllocationMethod": "Dynamic"}, "name": "vnetGatewayConfig1"}], "gatewayType":
+ "Vpn", "sku": {"name": "HighPerformance", "tier": "HighPerformance"}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1238']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [1bfa0c06-164e-11e7-8cd2-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"vpnClientConfiguration\": {\r\n \"vpnClientRootCertificates\"\
+ : [],\r\n \"vpnClientRevokedCertificates\": []\r\n },\r\n \"bgpSettings\"\
+ : {\r\n \"asn\": 65010,\r\n \"peerWeight\": 0\r\n }\r\n }\r\n\
+ }"}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/westus/operations/6e8922bb-0002-4b58-be2d-4e7f8a8cc282?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['2841']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:10:39 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [1cd6ebd2-164e-11e7-915f-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection","name":"cli_test_active_active_vnet_vnet_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:10:40 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['276']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"location": "westus", "properties": {"enableBgp": true, "vpnType": "RouteBased",
+ "activeActive": true, "bgpSettings": {"asn": 65020}, "ipConfigurations": [{"properties":
+ {"publicIPAddress": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw2ip1"},
+ "subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet2/subnets/GatewaySubnet"},
+ "privateIPAllocationMethod": "Dynamic"}, "name": "vnetGatewayConfig0"}, {"properties":
+ {"publicIPAddress": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw2ip2"},
+ "subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet2/subnets/GatewaySubnet"},
+ "privateIPAllocationMethod": "Dynamic"}, "name": "vnetGatewayConfig1"}], "gatewayType":
+ "Vpn", "sku": {"name": "HighPerformance", "tier": "HighPerformance"}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1238']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [1cec0f64-164e-11e7-b3c3-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2\"\
+ ,\r\n \"etag\": \"W/\\\"f4f3c739-c5cd-47ab-8db8-b3c0c3bdeea0\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"97c54646-d873-475c-9bef-da41edb4c0fc\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"f4f3c739-c5cd-47ab-8db8-b3c0c3bdeea0\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw2ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet2/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"f4f3c739-c5cd-47ab-8db8-b3c0c3bdeea0\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw2ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet2/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"vpnClientConfiguration\": {\r\n \"vpnClientRootCertificates\"\
+ : [],\r\n \"vpnClientRevokedCertificates\": []\r\n },\r\n \"bgpSettings\"\
+ : {\r\n \"asn\": 65020,\r\n \"peerWeight\": 0\r\n }\r\n }\r\n\
+ }"}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Network/locations/westus/operations/324d4f3c-4725-41d0-93f6-934f78c72d51?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['2841']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:10:40 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Retry-After: ['10']
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1197']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [1dc202f6-164e-11e7-90e8-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:10:41 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2fcdd3cc-164e-11e7-8547-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:11:11 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [41e4d886-164e-11e7-a839-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:11:42 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [53eb929e-164e-11e7-8cb3-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:12:12 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [66345730-164e-11e7-baf2-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:12:43 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [7844e8d8-164e-11e7-a193-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:13:14 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [8a8ad4e4-164e-11e7-b743-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:13:44 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [9cc92a8a-164e-11e7-8e00-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:14:15 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [aee32c5a-164e-11e7-b3f3-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:14:44 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [c0ec2ffa-164e-11e7-9b48-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:15:15 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [d326f430-164e-11e7-bf3d-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:15:46 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [e55d8434-164e-11e7-955a-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:16:17 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [f78b37cc-164e-11e7-bc10-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:16:47 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [09b8e7ac-164f-11e7-bc3d-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:17:17 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [1bf765ac-164f-11e7-8b72-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:17:48 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2e2a32a2-164f-11e7-9af4-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:18:18 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [402a35e2-164f-11e7-af81-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:18:49 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [523d6e52-164f-11e7-ab50-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:19:19 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [644d7dcc-164f-11e7-aa99-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:19:49 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [767da406-164f-11e7-b5c2-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:20:20 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [88bd5d12-164f-11e7-be69-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:20:51 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [9acddf9e-164f-11e7-a2a4-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:21:20 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [acd84428-164f-11e7-a5f3-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:21:51 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [bee6789e-164f-11e7-87b9-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:22:21 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [d0f7136c-164f-11e7-b505-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:22:52 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [e3342d26-164f-11e7-bf45-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:23:22 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [f5643bc0-164f-11e7-b38b-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:23:53 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [078faba2-1650-11e7-91b7-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:24:23 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [19c18fb6-1650-11e7-8094-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:24:54 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [2bd071f8-1650-11e7-87cc-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:25:24 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [3de681e6-1650-11e7-b6a1-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:25:55 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [5021683e-1650-11e7-9515-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:26:25 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6258d4f4-1650-11e7-aa4b-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:26:55 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [74944a9c-1650-11e7-b0fb-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:27:26 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [86ae2ffa-1650-11e7-99d5-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:27:57 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [991a9622-1650-11e7-843e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:28:28 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [ab538970-1650-11e7-8627-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:28:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [bd6776e8-1650-11e7-8a7e-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:29:28 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [cf72fc8a-1650-11e7-a8d7-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:29:58 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [e18a174a-1650-11e7-9b22-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:30:28 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [f38e517a-1650-11e7-99c8-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:30:59 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [05ac0da2-1651-11e7-a083-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:31:30 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [17b7ccec-1651-11e7-b251-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:32:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [29d5edb4-1651-11e7-9e9b-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:32:30 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [3bdda6b6-1651-11e7-a5b9-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:33:00 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [4dea6046-1651-11e7-8906-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7f367f89-818a-49d6-9ce8-7be087d4f4ff\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Updating\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"peerWeight\"\
+ : 0\r\n }\r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:33:30 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2718']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [5ff4f700-1651-11e7-8c0c-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1\"\
+ ,\r\n \"etag\": \"W/\\\"3cb4e036-ef14-428c-8187-2896dcda8bbe\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"resourceGuid\": \"9d779263-30f7-4d8d-8039-6d8a421c689a\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"3cb4e036-ef14-428c-8187-2896dcda8bbe\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"3cb4e036-ef14-428c-8187-2896dcda8bbe\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw1ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet1/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65010,\r\n \"bgpPeeringAddress\"\
+ : \"10.21.255.4,10.21.255.5\",\r\n \"peerWeight\": 0\r\n }\r\n }\r\
+ \n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2776']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [60836d40-1651-11e7-83fa-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2?api-version=2016-09-01
+ response:
+ body: {string: "{\r\n \"name\": \"vgw2\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2\"\
+ ,\r\n \"etag\": \"W/\\\"7b1d7895-aa6e-4c29-b59d-64847d39090b\\\"\",\r\n \
+ \ \"type\": \"Microsoft.Network/virtualNetworkGateways\",\r\n \"location\"\
+ : \"westus\",\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"resourceGuid\": \"97c54646-d873-475c-9bef-da41edb4c0fc\",\r\n \
+ \ \"ipConfigurations\": [\r\n {\r\n \"name\": \"vnetGatewayConfig0\"\
+ ,\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2/ipConfigurations/vnetGatewayConfig0\"\
+ ,\r\n \"etag\": \"W/\\\"7b1d7895-aa6e-4c29-b59d-64847d39090b\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw2ip1\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet2/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n },\r\n {\r\n \"name\": \"\
+ vnetGatewayConfig1\",\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2/ipConfigurations/vnetGatewayConfig1\"\
+ ,\r\n \"etag\": \"W/\\\"7b1d7895-aa6e-4c29-b59d-64847d39090b\\\"\"\
+ ,\r\n \"properties\": {\r\n \"provisioningState\": \"Succeeded\"\
+ ,\r\n \"privateIPAllocationMethod\": \"Dynamic\",\r\n \"\
+ publicIPAddress\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/publicIPAddresses/gw2ip2\"\
+ \r\n },\r\n \"subnet\": {\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworks/vnet2/subnets/GatewaySubnet\"\
+ \r\n }\r\n }\r\n }\r\n ],\r\n \"sku\": {\r\n \
+ \ \"name\": \"HighPerformance\",\r\n \"tier\": \"HighPerformance\"\
+ ,\r\n \"capacity\": 2\r\n },\r\n \"gatewayType\": \"Vpn\",\r\n\
+ \ \"vpnType\": \"RouteBased\",\r\n \"enableBgp\": true,\r\n \"activeActive\"\
+ : true,\r\n \"bgpSettings\": {\r\n \"asn\": 65020,\r\n \"bgpPeeringAddress\"\
+ : \"10.22.255.4,10.22.255.5\",\r\n \"peerWeight\": 0\r\n }\r\n }\r\
+ \n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:01 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['2776']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [611d12f0-1651-11e7-9be4-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection","name":"cli_test_active_active_vnet_vnet_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:03 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['276']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"properties": {"template": {"variables": {}, "resources": [{"dependsOn":
+ [], "properties": {"enableBgp": true, "virtualNetworkGateway2": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2"},
+ "virtualNetworkGateway1": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1"},
+ "authorizationKey": null, "sharedKey": "abc123", "connectionType": "Vnet2Vnet",
+ "routingWeight": 10}, "name": "vnet1to2", "apiVersion": "2015-06-15", "tags":
+ {}, "location": "westus", "type": "Microsoft.Network/connections"}], "outputs":
+ {"resource": {"value": "[reference(''vnet1to2'')]", "type": "object"}}, "$schema":
+ "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0", "parameters": {}}, "mode": "Incremental", "parameters":
+ {}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1023']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6139c0da-1651-11e7-84ae-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_EdOl8TQj5WceeQu26GMlub0qoiBA6Aiw","name":"vpn_connection_deploy_EdOl8TQj5WceeQu26GMlub0qoiBA6Aiw","properties":{"templateHash":"14700326768216073025","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-03-31T20:34:04.9195607Z","duration":"PT0.5022841S","correlationId":"2170b34a-bc4a-460b-b469-cbd1fde7e82f","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[]}}'}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_EdOl8TQj5WceeQu26GMlub0qoiBA6Aiw/operationStatuses/08587106144410603361?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['669']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:04 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1196']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6139c0da-1651-11e7-84ae-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587106144410603361?api-version=2016-09-01
+ response:
+ body: {string: '{"status":"Succeeded"}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['22']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [6139c0da-1651-11e7-84ae-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_EdOl8TQj5WceeQu26GMlub0qoiBA6Aiw","name":"vpn_connection_deploy_EdOl8TQj5WceeQu26GMlub0qoiBA6Aiw","properties":{"templateHash":"14700326768216073025","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-03-31T20:34:34.8409212Z","duration":"PT30.4236446S","correlationId":"2170b34a-bc4a-460b-b469-cbd1fde7e82f","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[],"outputs":{"resource":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"40aaaac0-fe2f-45a6-8078-1b22070426ff","virtualNetworkGateway1":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1"},"virtualNetworkGateway2":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2"},"connectionType":"Vnet2Vnet","routingWeight":10,"sharedKey":"abc123","enableBgp":true,"connectionStatus":"Unknown","ingressBytesTransferred":0,"egressBytesTransferred":0}}},"outputResources":[{"id":"Microsoft.Network/connections/vnet1to2"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:35 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['1460']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [74abb108-1651-11e7-9ed6-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection","name":"cli_test_active_active_vnet_vnet_connection","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['276']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"properties": {"template": {"variables": {}, "resources": [{"dependsOn":
+ [], "properties": {"enableBgp": true, "virtualNetworkGateway2": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1"},
+ "virtualNetworkGateway1": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2"},
+ "authorizationKey": null, "sharedKey": "abc123", "connectionType": "Vnet2Vnet",
+ "routingWeight": 10}, "name": "vnet2to1", "apiVersion": "2015-06-15", "tags":
+ {}, "location": "westus", "type": "Microsoft.Network/connections"}], "outputs":
+ {"resource": {"value": "[reference(''vnet2to1'')]", "type": "object"}}, "$schema":
+ "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "contentVersion": "1.0.0.0", "parameters": {}}, "mode": "Incremental", "parameters":
+ {}}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1023']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [74cab64a-1651-11e7-aede-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_4jZyDcCkNn0VRjbpCZRXO0oEjPEsbuii","name":"vpn_connection_deploy_4jZyDcCkNn0VRjbpCZRXO0oEjPEsbuii","properties":{"templateHash":"773591391528351821","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-03-31T20:34:37.5205546Z","duration":"PT0.3255211S","correlationId":"6403380a-6db3-4483-96b0-47128f81da59","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[]}}'}
+ headers:
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_4jZyDcCkNn0VRjbpCZRXO0oEjPEsbuii/operationStatuses/08587106144082825840?api-version=2016-09-01']
+ Cache-Control: [no-cache]
+ Content-Length: ['667']
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:34:36 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ status: {code: 201, message: Created}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [74cab64a-1651-11e7-aede-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587106144082825840?api-version=2016-09-01
+ response:
+ body: {string: '{"status":"Running"}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:35:07 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['20']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [74cab64a-1651-11e7-aede-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587106144082825840?api-version=2016-09-01
+ response:
+ body: {string: '{"status":"Succeeded"}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:35:37 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['22']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.1+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [74cab64a-1651-11e7-aede-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Resources/deployments/vpn_connection_deploy_4jZyDcCkNn0VRjbpCZRXO0oEjPEsbuii","name":"vpn_connection_deploy_4jZyDcCkNn0VRjbpCZRXO0oEjPEsbuii","properties":{"templateHash":"773591391528351821","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-03-31T20:35:12.9988189Z","duration":"PT35.8037854S","correlationId":"6403380a-6db3-4483-96b0-47128f81da59","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"connections","locations":["westus"]}]}],"dependencies":[],"outputs":{"resource":{"type":"Object","value":{"provisioningState":"Succeeded","resourceGuid":"119e9869-0d0c-4bf7-92fb-2385969d918f","virtualNetworkGateway1":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw2"},"virtualNetworkGateway2":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_active_active_vnet_vnet_connection/providers/Microsoft.Network/virtualNetworkGateways/vgw1"},"connectionType":"Vnet2Vnet","routingWeight":10,"sharedKey":"abc123","enableBgp":true,"connectionStatus":"Unknown","ingressBytesTransferred":0,"egressBytesTransferred":0}}},"outputResources":[{"id":"Microsoft.Network/connections/vnet2to1"}]}}'}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Fri, 31 Mar 2017 20:35:38 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Vary: [Accept-Encoding]
+ content-length: ['1458']
+ status: {code: 200, message: OK}
+version: 1
diff --git a/src/command_modules/azure-cli-network/tests/test_network_commands.py b/src/command_modules/azure-cli-network/tests/test_network_commands.py
index fc57f2a03..92c9a92b3 100644
--- a/src/command_modules/azure-cli-network/tests/test_network_commands.py
+++ b/src/command_modules/azure-cli-network/tests/test_network_commands.py
@@ -1090,6 +1090,123 @@ class NetworkSubnetSetScenarioTest(ResourceGroupVCRTestBase):
self.cmd('network vnet delete --resource-group {} --name {}'.format(self.resource_group, self.vnet_name))
self.cmd('network nsg delete --resource-group {} --name {}'.format(self.resource_group, nsg_name))
+class NetworkActiveActiveCrossPremiseScenarioTest(ResourceGroupVCRTestBase): # pylint: disable=too-many-instance-attributes
+
+ def __init__(self, test_method):
+ super(NetworkActiveActiveCrossPremiseScenarioTest, self).__init__(__file__, test_method, resource_group='cli_test_active_active_cross_premise_connection')
+ self.vnet1 = 'vnet1'
+ self.gw_subnet = 'GatewaySubnet'
+ self.vnet_prefix1 = '10.11.0.0/16'
+ self.vnet_prefix2 = '10.12.0.0/16'
+ self.gw_subnet_prefix = '10.12.255.0/27'
+ self.gw_ip1 = 'gwip1'
+ self.gw_ip2 = 'gwip2'
+
+ def test_network_active_active_cross_premise_connection(self):
+ self.execute()
+
+ def set_up(self):
+ super(NetworkActiveActiveCrossPremiseScenarioTest, self).set_up()
+ rg = self.resource_group
+
+ self.cmd('network vnet create -g {} -n {} --address-prefix {} {} --subnet-name {} --subnet-prefix {}'.format(rg, self.vnet1, self.vnet_prefix1, self.vnet_prefix2, self.gw_subnet, self.gw_subnet_prefix))
+ self.cmd('network public-ip create -g {} -n {}'.format(rg, self.gw_ip1))
+ self.cmd('network public-ip create -g {} -n {}'.format(rg, self.gw_ip2))
+
+ def body(self):
+ rg = self.resource_group
+ vnet1 = self.vnet1
+ vnet1_asn = 65010
+ gw1 = 'gw1'
+
+ lgw2 = 'lgw2'
+ lgw_ip = '131.107.72.22'
+ lgw_prefix = '10.52.255.253/32'
+ bgp_peer1 = '10.52.255.253'
+ lgw_asn = 65050
+ lgw_loc = 'eastus'
+ conn_151 = 'Vnet1toSite5_1'
+ conn_152 = 'Vnet1toSite5_2'
+ shared_key = 'abc123'
+
+ # create the vnet gateway with active-active feature
+ self.cmd('network vnet-gateway create -g {} -n {} --vnet {} --sku HighPerformance --asn {} --public-ip-addresses {} {}'.format(rg, gw1, vnet1, vnet1_asn, self.gw_ip1, self.gw_ip2))
+
+ # create and connect first local-gateway
+ self.cmd('network local-gateway create -g {} -n {} -l {} --gateway-ip-address {} --local-address-prefixes {} --asn {} --bgp-peering-address {}'.format(rg, lgw2, lgw_loc, lgw_ip, lgw_prefix, lgw_asn, bgp_peer1))
+ self.cmd('network vpn-connection create -g {} -n {} --vnet-gateway1 {} --local-gateway2 {} --shared-key {} --enable-bgp'.format(rg, conn_151, gw1, lgw2, shared_key))
+
+ lgw3 = 'lgw3'
+ lgw3_ip = '131.107.72.23'
+ lgw3_prefix = '10.52.255.254/32'
+ bgp_peer2 = '10.52.255.254'
+
+ # create and connect second local-gateway
+ self.cmd('network local-gateway create -g {} -n {} -l {} --gateway-ip-address {} --local-address-prefixes {} --asn {} --bgp-peering-address {}'.format(rg, lgw3, lgw_loc, lgw3_ip, lgw3_prefix, lgw_asn, bgp_peer2))
+ self.cmd('network vpn-connection create -g {} -n {} --vnet-gateway1 {} --local-gateway2 {} --shared-key {} --enable-bgp'.format(rg, conn_152, gw1, lgw3, shared_key))
+
+
+class NetworkActiveActiveVnetVnetScenarioTest(ResourceGroupVCRTestBase): # pylint: disable=too-many-instance-attributes
+
+ def __init__(self, test_method):
+ super(NetworkActiveActiveVnetVnetScenarioTest, self).__init__(__file__, test_method, resource_group='cli_test_active_active_vnet_vnet_connection')
+ self.gw_subnet = 'GatewaySubnet'
+
+ # First VNet
+ self.vnet1 = 'vnet1'
+ self.vnet1_prefix = '10.21.0.0/16'
+ self.gw1_subnet_prefix = '10.21.255.0/27'
+ self.gw1_ip1 = 'gw1ip1'
+ self.gw1_ip2 = 'gw1ip2'
+
+ # Second VNet
+ self.vnet2 = 'vnet2'
+ self.vnet2_prefix = '10.22.0.0/16'
+ self.gw2_subnet_prefix = '10.22.255.0/27'
+ self.gw2_ip1 = 'gw2ip1'
+ self.gw2_ip2 = 'gw2ip2'
+
+ def test_network_active_active_vnet_vnet_connection(self):
+ self.execute()
+
+ def set_up(self):
+ super(NetworkActiveActiveVnetVnetScenarioTest, self).set_up()
+ rg = self.resource_group
+
+ # Create one VNet with two public IPs
+ self.cmd('network vnet create -g {} -n {} --address-prefix {} --subnet-name {} --subnet-prefix {}'.format(rg, self.vnet1, self.vnet1_prefix, self.gw_subnet, self.gw1_subnet_prefix))
+ self.cmd('network public-ip create -g {} -n {}'.format(rg, self.gw1_ip1))
+ self.cmd('network public-ip create -g {} -n {}'.format(rg, self.gw1_ip2))
+
+ # Create second VNet with two public IPs
+ self.cmd('network vnet create -g {} -n {} --address-prefix {} --subnet-name {} --subnet-prefix {}'.format(rg, self.vnet2, self.vnet2_prefix, self.gw_subnet, self.gw2_subnet_prefix))
+ self.cmd('network public-ip create -g {} -n {}'.format(rg, self.gw2_ip1))
+ self.cmd('network public-ip create -g {} -n {}'.format(rg, self.gw2_ip2))
+
+ def body(self):
+ rg = self.resource_group
+ vnet1 = self.vnet1
+ vnet1_asn = 65010
+ gw1 = 'vgw1'
+ self.cmd('network vnet-gateway create -g {} -n {} --vnet {} --sku HighPerformance --asn {} --public-ip-addresses {} {} --no-wait'.format(rg, gw1, vnet1, vnet1_asn, self.gw1_ip1, self.gw1_ip2))
+
+ vnet2 = self.vnet2
+ vnet2_asn = 65020
+ gw2 = 'vgw2'
+ self.cmd('network vnet-gateway create -g {} -n {} --vnet {} --sku HighPerformance --asn {} --public-ip-addresses {} {} --no-wait'.format(rg, gw2, vnet2, vnet2_asn, self.gw2_ip1, self.gw2_ip2))
+
+ # wait for gateway completion to finish
+ self.cmd('network vnet-gateway wait -g {} -n {} --created'.format(rg, gw1))
+ self.cmd('network vnet-gateway wait -g {} -n {} --created'.format(rg, gw2))
+
+ conn12 = 'vnet1to2'
+ conn21 = 'vnet2to1'
+ shared_key = 'abc123'
+
+ # create and connect the VNet gateways
+ self.cmd('network vpn-connection create -g {} -n {} --vnet-gateway1 {} --vnet-gateway2 {} --shared-key {} --enable-bgp'.format(rg, conn12, gw1, gw2, shared_key))
+ self.cmd('network vpn-connection create -g {} -n {} --vnet-gateway1 {} --vnet-gateway2 {} --shared-key {} --enable-bgp'.format(rg, conn21, gw2, gw1, shared_key))
+
class NetworkVpnGatewayScenarioTest(ResourceGroupVCRTestBase): # pylint: disable=too-many-instance-attributes
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": -1,
"issue_text_score": 1,
"test_score": -1
},
"num_modified_files": 5
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==2.0.0
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_documentdb&subdirectory=src/command_modules/azure-cli-documentdb
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@678b940f29230f53fd7cbec7211842e99658ed3d#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.4
azure-core==1.24.2
azure-datalake-store==0.0.6
azure-graphrbac==0.30.0rc6
azure-keyvault==0.1.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==3.0.0
azure-mgmt-compute==0.33.1rc1
azure-mgmt-containerregistry==0.2.0
azure-mgmt-datalake-analytics==0.1.3
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.3
azure-mgmt-dns==1.0.0
azure-mgmt-documentdb==0.1.1
azure-mgmt-iothub==0.2.1
azure-mgmt-keyvault==0.30.0
azure-mgmt-monitor==0.1.0
azure-mgmt-network==0.30.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.2
azure-mgmt-sql==0.4.0
azure-mgmt-storage==0.31.0
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.31.0
azure-monitor==0.2.0
azure-nspkg==3.0.2
azure-storage==0.34.0
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==2.0.0
- azure-common==1.1.4
- azure-core==1.24.2
- azure-datalake-store==0.0.6
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.1.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==3.0.0
- azure-mgmt-compute==0.33.1rc1
- azure-mgmt-containerregistry==0.2.0
- azure-mgmt-datalake-analytics==0.1.3
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.3
- azure-mgmt-dns==1.0.0
- azure-mgmt-documentdb==0.1.1
- azure-mgmt-iothub==0.2.1
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-monitor==0.1.0
- azure-mgmt-network==0.30.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.2
- azure-mgmt-sql==0.4.0
- azure-mgmt-storage==0.31.0
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.31.0
- azure-monitor==0.2.0
- azure-nspkg==3.0.2
- azure-storage==0.34.0
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkLocalGatewayScenarioTest::test_network_local_gateway",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkActiveActiveCrossPremiseScenarioTest::test_network_active_active_cross_premise_connection",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkActiveActiveVnetVnetScenarioTest::test_network_active_active_vnet_vnet_connection"
]
| []
| [
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkMultiIdsShowScenarioTest::test_multi_id_show",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkUsageListScenarioTest::test_network_usage_list",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkAppGatewayDefaultScenarioTest::test_network_app_gateway_with_defaults",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkAppGatewayExistingSubnetScenarioTest::test_network_app_gateway_with_existing_subnet",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkAppGatewayNoWaitScenarioTest::test_network_app_gateway_no_wait",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkAppGatewayPrivateIpScenarioTest::test_network_app_gateway_with_private_ip",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkAppGatewayPublicIpScenarioTest::test_network_app_gateway_with_public_ip",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkAppGatewayWafScenarioTest::test_network_app_gateway_waf",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkPublicIpScenarioTest::test_network_public_ip",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkExpressRouteScenarioTest::test_network_express_route",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkLoadBalancerScenarioTest::test_network_load_balancer",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkLoadBalancerIpConfigScenarioTest::test_network_load_balancer_ip_config",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkLoadBalancerSubresourceScenarioTest::test_network_load_balancer_subresources",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkNicScenarioTest::test_network_nic",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkNicSubresourceScenarioTest::test_network_nic_subresources",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkNicConvenienceCommandsScenarioTest::test_network_nic_convenience_commands",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkSecurityGroupScenarioTest::test_network_nsg",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkRouteTableOperationScenarioTest::test_network_route_table_operation",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkVNetScenarioTest::test_network_vnet",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkVNetPeeringScenarioTest::test_network_vnet_peering",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkSubnetSetScenarioTest::test_network_subnet_set",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkVpnGatewayScenarioTest::test_network_vpn_gateway",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkTrafficManagerScenarioTest::test_network_traffic_manager",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkDnsScenarioTest::test_network_dns",
"src/command_modules/azure-cli-network/tests/test_network_commands.py::NetworkZoneImportExportTest::test_network_dns_zone_import_export"
]
| []
| MIT License | 1,147 | [
"src/command_modules/azure-cli-network/HISTORY.rst",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/_params.py",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/_validators.py"
]
| [
"src/command_modules/azure-cli-network/HISTORY.rst",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/_help.py",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/_params.py",
"src/command_modules/azure-cli-network/azure/cli/command_modules/network/_validators.py"
]
|
smarter-travel-media__warthog-14 | 7e2c15be2747b71b2b6ef18c3191355489eaceb6 | 2017-04-04 21:30:36 | 7e2c15be2747b71b2b6ef18c3191355489eaceb6 | diff --git a/warthog/config.py b/warthog/config.py
index 3dfab6f..9c72399 100644
--- a/warthog/config.py
+++ b/warthog/config.py
@@ -15,15 +15,14 @@ Load and parse configuration for a client from an INI-style file.
"""
import collections
-import threading
-import ssl
import sys
-
+import threading
import codecs
import os.path
+
import warthog.exceptions
+import warthog.ssl
from .packages import six
-# pylint: disable=import-error
from .packages.six.moves import configparser
# List of locations (from most preferred to least preferred) that will
@@ -38,12 +37,10 @@ DEFAULT_CONFIG_LOCATIONS = [
os.path.join(os.getcwd(), 'warthog.ini')
]
-
# By default, we assume that the configuration file is in UTF-8 unless
# the caller indicates it is in some other encoding.
DEFAULT_CONFIG_ENCODING = 'utf-8'
-
# Simple immutable struct to hold configuration information for a WarthogClient
WarthogConfigSettings = collections.namedtuple(
'WarthogConfigSettings', ['scheme_host', 'username', 'password', 'verify', 'ssl_version'])
@@ -163,10 +160,14 @@ class WarthogConfigLoader(object):
def parse_ssl_version(version_str, ssl_module=None):
- """Get the :mod:`ssl` protocol constant that represents the given version
+ """Get the :mod:`warthog.ssl` protocol constant that represents the given version
string if it exists, raising an error if the version string is malformed or
does not correspond to a supported protocol.
+ Note that the :mod:`warthog.ssl` protocol constants should match the Python
+ :mod:`ssl` module exactly. The difference is that our SSL module has all
+ potential versions while older Python modules did not.
+
:param unicode version_str: Version string to resolve to a protocol
:param module ssl_module: SSL module to get the protocol constant from
:return: The ssl module protocol constant or ``None``
@@ -180,7 +181,7 @@ def parse_ssl_version(version_str, ssl_module=None):
if not version_str:
return None
- ssl_module = ssl_module if ssl_module is not None else ssl
+ ssl_module = ssl_module if ssl_module is not None else warthog.ssl
# Get a list of all the 'PROTOCOL' constants in the SSL module, and
# strip the 'PROTOCOL_' prefix. This is the set of supported SSL or
diff --git a/warthog/ssl.py b/warthog/ssl.py
new file mode 100644
index 0000000..dea969f
--- /dev/null
+++ b/warthog/ssl.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+#
+# Warthog - Simple client for A10 load balancers
+#
+# Copyright 2014-2016 Smarter Travel
+#
+# Available under the MIT license. See LICENSE for details.
+#
+
+"""
+warthog.ssl
+~~~~~~~~~~~
+
+SSL related constants used by Warthog
+"""
+
+# Define our own versions of expected constants in the Python ssl
+# module since older Python versions didn't define all of them. For
+# example Python 2.6 and Python 3.3 don't include TLSv1.1 or TLSv1.2
+# and we need to support the combination of those Python versions
+# and TLS versions. Kinda hacky but required. Such is life.
+
+PROTOCOL_SSLv3 = 1
+
+PROTOCOL_SSLv23 = 2
+
+PROTOCOL_TLSv1 = 3
+
+PROTOCOL_TLSv1_1 = 4
+
+PROTOCOL_TLSv1_2 = 5
diff --git a/warthog/transport.py b/warthog/transport.py
index 836a526..776d2c8 100644
--- a/warthog/transport.py
+++ b/warthog/transport.py
@@ -26,17 +26,12 @@ from requests.adapters import (
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from requests.packages.urllib3.poolmanager import PoolManager
-# HACK: We need to default to TLSv1.2 to work with the new load balancer
-# but Python 2.6 and Python 3.3 don't have the TLSv1.2 constant. BUT, TLS
-# version 1.2 will work with the version of requests we use on Python 2.6
-# so we hack in the constant here for the sake of a default.
-# pylint: disable=invalid-name
-_PROTOCOL_TLSv1_2 = 5
+import warthog.ssl
# Default to using the SSL/TLS version that the A10 requires instead of
# the default that the requests/urllib3 library picks. Or, maybe the A10
# just doesn't allow the client to negotiate. Either way, we use TLSv1.2.
-DEFAULT_SSL_VERSION = _PROTOCOL_TLSv1_2
+DEFAULT_SSL_VERSION = warthog.ssl.PROTOCOL_TLSv1_2
# Default to verifying SSL/TLS certs because "safe by default" is a good idea.
DEFAULT_CERT_VERIFY = True
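
A short usage sketch of how the new `warthog.ssl` constants plug into the transport factory — this mirrors the updated tests further down and is illustrative only, not part of the recorded patch:

```
import warthog.ssl
import warthog.transport

# Build a requests Session factory pinned to TLS 1.2 using the
# library-local constant instead of the stdlib ssl module, so the
# same code path works on Python versions that lack PROTOCOL_TLSv1_2.
factory = warthog.transport.get_transport_factory(
    ssl_version=warthog.ssl.PROTOCOL_TLSv1_2)

session = factory()
adapter = session.get_adapter('https://lb.example.com')
assert adapter.ssl_version == warthog.ssl.PROTOCOL_TLSv1_2
```
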
 | Remove dependency on `ssl` module protocol constants in warthog.config
Remove dependency on `ssl` module protocol constants in warthog.config. Like the issue in https://github.com/smarter-travel-media/warthog/commit/a3c6ed378ff3c83133ca18898644eb356d203067, we need to remove the dependence on the `ssl` module in `warthog.config` to allow use on Python 2.6. | smarter-travel-media/warthog | diff --git a/test/test_ssl.py b/test/test_ssl.py
new file mode 100644
index 0000000..9165683
--- /dev/null
+++ b/test/test_ssl.py
@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+
+import ssl
+
+import warthog.ssl
+
+
+# Test our hacky constants to make sure we haven't shot ourselves in the
+# foot in a completely obvious and predictable way.
+
+
+def test_ssl3_matches():
+ assert ssl.PROTOCOL_SSLv3 == warthog.ssl.PROTOCOL_SSLv3
+
+
+def test_ssl23_matches():
+ assert ssl.PROTOCOL_SSLv23 == warthog.ssl.PROTOCOL_SSLv23
+
+
+def test_tls1_matches():
+ assert ssl.PROTOCOL_TLSv1 == warthog.ssl.PROTOCOL_TLSv1
+
+
+def test_tls1_1_matches():
+ try:
+ # It's possible that we're running under an old version of Python
+ # and this constant doesn't exist (hence why warthog.ssl exists).
+ module_const = ssl.PROTOCOL_TLSv1_1
+ except AttributeError:
+ return
+
+ assert module_const == warthog.ssl.PROTOCOL_TLSv1_1
+
+
+def test_tls1_2_matches():
+ try:
+ # It's possible that we're running under an old version of Python
+ # and this constant doesn't exist (hence why warthog.ssl exists).
+ module_const = ssl.PROTOCOL_TLSv1_2
+ except AttributeError:
+ return
+
+ assert module_const == warthog.ssl.PROTOCOL_TLSv1_2
diff --git a/test/test_transport.py b/test/test_transport.py
index fd5d81a..3d399e6 100644
--- a/test/test_transport.py
+++ b/test/test_transport.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
-import ssl
-
+import warthog.ssl
import warthog.transport
@@ -16,11 +15,11 @@ def test_get_transport_factory_no_verify():
def test_get_transport_factory_alternate_ssl_version():
- factory = warthog.transport.get_transport_factory(ssl_version=ssl.PROTOCOL_SSLv3)
+ factory = warthog.transport.get_transport_factory(ssl_version=warthog.ssl.PROTOCOL_TLSv1_1)
session = factory()
adapter = session.get_adapter('https://lb.example.com')
- assert ssl.PROTOCOL_SSLv3 == adapter.ssl_version, 'Did not get expected SSL version'
+ assert warthog.ssl.PROTOCOL_TLSv1_1 == adapter.ssl_version, 'Did not get expected SSL version'
def test_get_transport_factory_with_defaults():
@@ -31,18 +30,3 @@ def test_get_transport_factory_with_defaults():
assert warthog.transport.DEFAULT_SSL_VERSION == adapter.ssl_version, 'Did not get default TLS version'
assert warthog.transport.DEFAULT_CERT_VERIFY == session.verify, 'Did not get default verify setting'
-
-def test_default_tls_version_matches_ssl_module():
- try:
- import ssl
- module_version = ssl.PROTOCOL_TLSv1_2
- except AttributeError:
- # Running an old version of Python that doesn't have the version
- # constant. This is the reason we need to use our own and we can't
- # verify that it's right here so just end.
- return
-
- # Make sure that our default version matches the actual constant in the
- # ssl module. This is really just a sanity check to make sure this hack
- # doesn't blow up in our face
- assert module_version == warthog.transport.DEFAULT_SSL_VERSION
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 2
} | 1.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | click==6.7
exceptiongroup==1.2.2
iniconfig==2.1.0
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
requests==2.11.1
tomli==2.2.1
-e git+https://github.com/smarter-travel-media/warthog.git@7e2c15be2747b71b2b6ef18c3191355489eaceb6#egg=warthog
| name: warthog
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- click==6.7
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- requests==2.11.1
- tomli==2.2.1
prefix: /opt/conda/envs/warthog
| [
"test/test_ssl.py::test_ssl23_matches",
"test/test_ssl.py::test_tls1_matches",
"test/test_ssl.py::test_tls1_1_matches",
"test/test_ssl.py::test_tls1_2_matches",
"test/test_transport.py::test_get_transport_factory_no_verify",
"test/test_transport.py::test_get_transport_factory_alternate_ssl_version",
"test/test_transport.py::test_get_transport_factory_with_defaults"
]
| [
"test/test_ssl.py::test_ssl3_matches"
]
| []
| []
| MIT License | 1,148 | [
"warthog/ssl.py",
"warthog/transport.py",
"warthog/config.py"
]
| [
"warthog/ssl.py",
"warthog/transport.py",
"warthog/config.py"
]
|
|
planetarypy__pvl-28 | 27b4128d10b4dcc0f14c64fcc61bedc2e27030bf | 2017-04-04 23:06:42 | 0fc3b804f214170b28f94bd5b3703f439c968f92 | diff --git a/pvl/_collections.py b/pvl/_collections.py
index 2abaf3a..dcb4b91 100644
--- a/pvl/_collections.py
+++ b/pvl/_collections.py
@@ -41,6 +41,10 @@ class KeysView(MappingView):
keys = [key for key, _ in self._mapping]
return '%s(%r)' % (type(self).__name__, keys)
+ def index(self, key):
+ keys = [k for k, _ in self._mapping]
+ return keys.index(key)
+
class ItemsView(MappingView):
def __contains__(self, item):
@@ -54,6 +58,10 @@ class ItemsView(MappingView):
def __getitem__(self, index):
return self._mapping[index]
+ def index(self, item):
+ items = [i for i in self._mapping]
+ return items.index(item)
+
class ValuesView(MappingView):
def __contains__(self, value):
@@ -73,6 +81,10 @@ class ValuesView(MappingView):
values = [value for _, value in self._mapping]
return '%s(%r)' % (type(self).__name__, values)
+ def index(self, value):
+ values = [val for _, val in self._mapping]
+ return values.index(value)
+
class OrderedMultiDict(dict, MutableMapping):
"""A ``dict`` like container.
@@ -250,6 +262,67 @@ class OrderedMultiDict(dict, MutableMapping):
def copy(self):
return type(self)(self)
+ def __insert_wrapper(func):
+ """Make sure the arguments given to the insert methods are correct"""
+ def check_func(self, key, new_item, instance=0):
+ if key not in self.keys():
+ raise KeyError("%s not a key in label" % (key))
+ if not isinstance(new_item, (list, OrderedMultiDict)):
+ raise TypeError("The new item must be a list or PVLModule")
+ if isinstance(new_item, OrderedMultiDict):
+ new_item = list(new_item)
+ return func(self, key, new_item, instance)
+ return check_func
+
+ def _get_index_for_insert(self, key, instance):
+ """Get the index of the key to insert before or after"""
+ if instance == 0:
+ # Index method will return the first occurence of the key
+ index = self.keys().index(key)
+ else:
+ occurrence = -1
+ for index, k in enumerate(self.keys()):
+ if k == key:
+ occurrence += 1
+ if occurrence == instance:
+ # Found the key and the correct occurence of the key
+ break
+
+ if occurrence != instance:
+ # Gone through the entire list of keys and the instance number
+ # given is too high for the number of occurences of the key
+ raise ValueError(
+ (
+ "Cannot insert before/after the %d "
+ "instance of the key '%s' since there are "
+ "only %d occurences of the key" % (
+ instance, key, occurrence)
+ ))
+ return index
+
+ def _insert_item(self, key, new_item, instance, is_after):
+ """Insert a new item before or after another item"""
+ index = self._get_index_for_insert(key, instance)
+ index = index + 1 if is_after else index
+ self.__items = self.__items[:index] + new_item + self.__items[index:]
+ # Make sure indexing works with new items
+ for new_key, new_value in new_item:
+ if new_key in self:
+ value_list = [val for k, val in self.__items if k == new_key]
+ dict_setitem(self, new_key, value_list)
+ else:
+ dict_setitem(self, new_key, [new_value])
+
+ @__insert_wrapper
+ def insert_after(self, key, new_item, instance=0):
+ """Insert an item after a key"""
+ self._insert_item(key, new_item, instance, True)
+
+ @__insert_wrapper
+ def insert_before(self, key, new_item, instance=0):
+ """Insert an item before a key"""
+ self._insert_item(key, new_item, instance, False)
+
class PVLModule(OrderedMultiDict):
pass
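
A minimal usage sketch of the `insert_before` / `insert_after` API added in the patch above — the key names and values here are illustrative, not taken from the original issue:

```
import pvl

# A label is an ordered multi-dict of (key, value) pairs.
label = pvl.PVLModule([
    ('PDS_VERSION_ID', 'PDS3'),
    ('LABEL_REVISION_NOTE', 'N/A'),
])

# Insert new pairs immediately before an existing key; the new item
# must be a list of (key, value) tuples or another PVLModule.
label.insert_before('LABEL_REVISION_NOTE', [
    ('DD_VERSION_ID', 'PDSCAT1R100'),
    ('DATA_SET_ID', 'UNK'),
])

# insert_after works the same way; the optional third argument selects
# which occurrence of a repeated key to anchor on (0 = first).
label.insert_after('PDS_VERSION_ID', [('PRODUCT_ID', 'UNK')], 0)

# Serialize back to PVL text with the new keys in place.
print(pvl.dumps(label))
```

The tests added below exercise the same behavior, including the `instance` argument and the error cases for missing keys and malformed items.
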
| Inserting key/value pairs in PDS label
Is there a way to insert desired key/value lines at a specific point in a PDS label?
ex:
from
```
PDS_VERSION_ID =
DD_VERSION_ID =
LABEL_REVISION_NOTE =
^IMAGE
```
to
```
PDS_VERSION_ID =
DD_VERSION_ID =
LABEL_REVISION_NOTE =
DATA_SET_ID =
PRODUCT_ID =
INSTRUMENT_HOST_NAME =
^IMAGE
``` | planetarypy/pvl | diff --git a/tests/test_collections.py b/tests/test_collections.py
index 50df73d..18b5a4d 100644
--- a/tests/test_collections.py
+++ b/tests/test_collections.py
@@ -358,14 +358,25 @@ def test_py2_items():
('a', 3),
])
- assert isinstance(module.items(), list)
- assert module.items() == [('a', 1), ('b', 2), ('a', 3)]
-
- assert isinstance(module.keys(), list)
- assert module.keys() == ['a', 'b', 'a']
-
- assert isinstance(module.values(), list)
- assert module.values() == [1, 2, 3]
+ items = module.items()
+ assert isinstance(items, list)
+ assert items == [('a', 1), ('b', 2), ('a', 3)]
+ assert items.index(('a', 1)) == 0
+ assert items.index(('b', 2)) == 1
+ assert items.index(('a', 3)) == 2
+
+ keys = module.keys()
+ assert isinstance(keys, list)
+ assert keys == ['a', 'b', 'a']
+ assert keys.index('a') == 0
+ assert keys.index('b') == 1
+
+ values = module.values()
+ assert isinstance(values, list)
+ assert values == [1, 2, 3]
+ assert values.index(1) == 0
+ assert values.index(2) == 1
+ assert values.index(3) == 2
@pytest.mark.skipif(six.PY2, reason='requires python3')
@@ -391,14 +402,30 @@ def test_py3_items():
])
assert isinstance(module.items(), pvl._collections.ItemsView)
- assert module.items()[0] == ('a', 1)
+ items = module.items()
+ assert items[0] == ('a', 1)
+ assert items[1] == ('b', 2)
+ assert items[2] == ('a', 3)
+ assert items.index(('a', 1)) == 0
+ assert items.index(('b', 2)) == 1
+ assert items.index(('a', 3)) == 2
assert isinstance(module.keys(), pvl._collections.KeysView)
- assert module.keys()[0] == 'a'
+ keys = module.keys()
+ assert keys[0] == 'a'
+ assert keys[1] == 'b'
+ assert keys[2] == 'a'
+ assert keys.index('a') == 0
+ assert keys.index('b') == 1
assert isinstance(module.values(), pvl._collections.ValuesView)
- assert module.values()[0] == 1
-
+ values = module.values()
+ assert values[0] == 1
+ assert values[1] == 2
+ assert values[2] == 3
+ assert values.index(1) == 0
+ assert values.index(2) == 1
+ assert values.index(3) == 2
if six.PY3:
@@ -549,3 +576,153 @@ def test_conversion():
assert dict(module) == expected_dict
assert list(module) == expected_list
+
+
[email protected](
+ 'expected_label, key, instance, expected_list, expected_value', [
+ ([
+ ('a', 4),
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ('c', 5),
+ ], 'a', 0, [4, 1, 3], 4),
+ ([
+ ('a', 1),
+ ('a', 4),
+ ('b', 2),
+ ('a', 3),
+ ('c', 5),
+ ], 'b', 0, [1, 4, 3], 1),
+ ([
+ ('a', 1),
+ ('b', 2),
+ ('a', 4),
+ ('a', 3),
+ ('c', 5),
+ ], 'a', 1, [1, 4, 3], 1),
+ ([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ('a', 4),
+ ('c', 5),
+ ], 'c', 0, [1, 3, 4], 1)
+ ])
+def test_insert_before(expected_label, key, instance, expected_list,
+ expected_value):
+ module1 = pvl.PVLModule([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ('c', 5),
+ ])
+ module2 = module1.copy()
+
+ expected_module = pvl.PVLModule(expected_label)
+
+ module1.insert_before(key, [('a', 4)], instance)
+ assert expected_module == module1
+ assert module1['a'] == expected_value
+ assert module1.getlist('a') == expected_list
+
+ module2.insert_before(key, pvl.PVLModule([('a', 4)]), instance)
+ assert module2 == expected_module
+ assert module1['a'] == expected_value
+ assert module1.getlist('a') == expected_list
+
+
[email protected](
+ 'expected_label, key, instance, expected_list, expected_value', [
+ ([
+ ('a', 1),
+ ('a', 4),
+ ('b', 2),
+ ('a', 3),
+ ('c', 5),
+ ], 'a', 0, [1, 4, 3], 1),
+ ([
+ ('a', 1),
+ ('b', 2),
+ ('a', 4),
+ ('a', 3),
+ ('c', 5),
+ ], 'b', 0, [1, 4, 3], 1),
+ ([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ('a', 4),
+ ('c', 5),
+ ], 'a', 1, [1, 3, 4], 1),
+ ([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ('c', 5),
+ ('a', 4),
+ ], 'c', 0, [1, 3, 4], 1)
+ ])
+def test_insert_after(expected_label, key, instance, expected_list,
+ expected_value):
+ module1 = pvl.PVLModule([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ('c', 5),
+ ])
+ module2 = module1.copy()
+
+ expected_module = pvl.PVLModule(expected_label)
+
+ module1.insert_after(key, [('a', 4)], instance)
+ assert expected_module == module1
+ assert module1['a'] == expected_value
+ assert module1.getlist('a') == expected_list
+
+ module2.insert_after(key, pvl.PVLModule([('a', 4)]), instance)
+ assert module2 == expected_module
+ assert module1['a'] == expected_value
+ assert module1.getlist('a') == expected_list
+
+
[email protected](
+ 'key, instance, expected_index', [
+ ('a', 0, 0),
+ ('b', 0, 1),
+ ('a', 1, 2)
+ ])
+def test_get_index_for_insert(key, instance, expected_index):
+ module = pvl.PVLModule([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ])
+
+ module._get_index_for_insert(key, instance) == expected_index
+
+
+def test_insert_raises():
+ module = pvl.PVLModule([
+ ('a', 1),
+ ('b', 2),
+ ('a', 3),
+ ])
+
+ with pytest.raises(KeyError):
+ module.insert_before('error_key', [('foo', 'bar')])
+
+ with pytest.raises(KeyError):
+ module.insert_after('error_key', [('foo', 'bar')])
+
+ with pytest.raises(TypeError):
+ module.insert_before('a', ('foo', 'bar'))
+
+ with pytest.raises(TypeError):
+ module.insert_after('a', ('foo', 'bar'))
+
+ with pytest.raises(ValueError):
+ module.insert_before('a', [('foo', 'bar')], 2)
+
+ with pytest.raises(ValueError):
+ module.insert_after('a', [('foo', 'bar')], 2)
diff --git a/tests/test_encoder.py b/tests/test_encoder.py
index 1dee8ff..76fbf1e 100644
--- a/tests/test_encoder.py
+++ b/tests/test_encoder.py
@@ -111,3 +111,33 @@ def test_quoated_strings():
encoder = pvl.encoder.PDSLabelEncoder
assert module == pvl.loads(pvl.dumps(module, cls=encoder))
+
+
+def test_dump_to_file_insert_before():
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ for filename in PDS_LABELS:
+ label = pvl.load(filename)
+ if os.path.basename(filename) != 'empty.lbl':
+ label.insert_before('PDS_VERSION_ID', [('new', 'item')])
+ tmpfile = os.path.join(tmpdir, os.path.basename(filename))
+ pvl.dump(label, tmpfile)
+ assert label == pvl.load(tmpfile)
+ finally:
+ shutil.rmtree(tmpdir)
+
+
+def test_dump_to_file_insert_after():
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ for filename in PDS_LABELS:
+ label = pvl.load(filename)
+ if os.path.basename(filename) != 'empty.lbl':
+ label.insert_after('PDS_VERSION_ID', [('new', 'item')])
+ tmpfile = os.path.join(tmpdir, os.path.basename(filename))
+ pvl.dump(label, tmpfile)
+ assert label == pvl.load(tmpfile)
+ finally:
+ shutil.rmtree(tmpdir)
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"flake8"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
coverage==6.2
distlib==0.3.9
filelock==3.4.1
flake8==5.0.4
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
mccabe==0.7.0
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
-e git+https://github.com/planetarypy/pvl.git@27b4128d10b4dcc0f14c64fcc61bedc2e27030bf#egg=pvl
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
pytz==2025.2
six==1.17.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
virtualenv==20.16.2
zipp==3.6.0
| name: pvl
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- coverage==6.2
- distlib==0.3.9
- filelock==3.4.1
- flake8==5.0.4
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- mccabe==0.7.0
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- pytz==2025.2
- six==1.17.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- virtualenv==20.16.2
- zipp==3.6.0
prefix: /opt/conda/envs/pvl
| [
"tests/test_collections.py::test_py3_items",
"tests/test_collections.py::test_insert_before[expected_label0-a-0-expected_list0-4]",
"tests/test_collections.py::test_insert_before[expected_label1-b-0-expected_list1-1]",
"tests/test_collections.py::test_insert_before[expected_label2-a-1-expected_list2-1]",
"tests/test_collections.py::test_insert_before[expected_label3-c-0-expected_list3-1]",
"tests/test_collections.py::test_insert_after[expected_label0-a-0-expected_list0-1]",
"tests/test_collections.py::test_insert_after[expected_label1-b-0-expected_list1-1]",
"tests/test_collections.py::test_insert_after[expected_label2-a-1-expected_list2-1]",
"tests/test_collections.py::test_insert_after[expected_label3-c-0-expected_list3-1]",
"tests/test_collections.py::test_get_index_for_insert[a-0-0]",
"tests/test_collections.py::test_get_index_for_insert[b-0-1]",
"tests/test_collections.py::test_get_index_for_insert[a-1-2]",
"tests/test_collections.py::test_insert_raises",
"tests/test_encoder.py::test_dump_to_file_insert_before",
"tests/test_encoder.py::test_dump_to_file_insert_after"
]
| [
"tests/test_collections.py::test_conversion"
]
| [
"tests/test_collections.py::test_empty",
"tests/test_collections.py::test_list_creation",
"tests/test_collections.py::test_dict_creation",
"tests/test_collections.py::test_keyword_creation",
"tests/test_collections.py::test_key_access",
"tests/test_collections.py::test_index_access",
"tests/test_collections.py::test_slice_access",
"tests/test_collections.py::test_set",
"tests/test_collections.py::test_delete",
"tests/test_collections.py::test_clear",
"tests/test_collections.py::test_discard",
"tests/test_collections.py::test_pop",
"tests/test_collections.py::test_popitem",
"tests/test_collections.py::test_update",
"tests/test_collections.py::test_append",
"tests/test_collections.py::test_len",
"tests/test_collections.py::test_repr",
"tests/test_collections.py::test_iterators",
"tests/test_collections.py::test_equlity",
"tests/test_collections.py::test_copy",
"tests/test_encoder.py::test_dump_stream",
"tests/test_encoder.py::test_dump_to_file",
"tests/test_encoder.py::test_default_encoder",
"tests/test_encoder.py::test_cube_encoder",
"tests/test_encoder.py::test_pds_encoder",
"tests/test_encoder.py::test_special_values",
"tests/test_encoder.py::test_special_strings",
"tests/test_encoder.py::test_unkown_value",
"tests/test_encoder.py::test_quoated_strings"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,149 | [
"pvl/_collections.py"
]
| [
"pvl/_collections.py"
]
|
|
hylang__hy-1266 | bb9f543246dfa519e03bb3b315a850973020c503 | 2017-04-05 00:39:44 | 5bf9ecfc5a40821eb15e2ac7f218ae276f81fa60 | diff --git a/NEWS b/NEWS
index 9ae36128..fd3a1e1a 100644
--- a/NEWS
+++ b/NEWS
@@ -23,6 +23,7 @@ Changes from 0.12.1
* `setv` no longer unnecessarily tries to get attributes
* `loop` no longer replaces string literals equal to "recur"
* The REPL now prints the correct value of `do` and `try` forms
+ * Fixed a crash when tokenizing a single quote followed by whitespace
[ Misc. Improvements ]
* New contrib module `hy-repr`
diff --git a/hy/lex/__init__.py b/hy/lex/__init__.py
index f1a74aae..8a73d874 100644
--- a/hy/lex/__init__.py
+++ b/hy/lex/__init__.py
@@ -34,7 +34,7 @@ def tokenize(buf):
except LexingError as e:
pos = e.getsourcepos()
raise LexException("Could not identify the next token.",
- pos.lineno, pos.colno)
+ pos.lineno, pos.colno, buf)
except LexException as e:
if e.source is None:
e.source = buf
diff --git a/hy/lex/exceptions.py b/hy/lex/exceptions.py
index 4c4b7600..2ef5660a 100644
--- a/hy/lex/exceptions.py
+++ b/hy/lex/exceptions.py
@@ -24,12 +24,12 @@ from hy.errors import HyError
class LexException(HyError):
"""Error during the Lexing of a Hython expression."""
- def __init__(self, message, lineno, colno):
+ def __init__(self, message, lineno, colno, source=None):
super(LexException, self).__init__(message)
self.message = message
self.lineno = lineno
self.colno = colno
- self.source = None
+ self.source = source
self.filename = '<stdin>'
def __str__(self):
| Single quote crashes hy2py
## x.hy
```
'
```
## `hy2py x.hy`
```
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "hy/cmdline.py", line 406, in hy2py_main
if stdin_text is None
File "hy/cmdline.py", line 186, in pretty_error
print(e, file=sys.stderr)
File "hy/lex/exceptions.py", line 43, in __str__
source = self.source.split("\n")
AttributeError: 'NoneType' object has no attribute 'split'
``` | hylang/hy | diff --git a/tests/test_lex.py b/tests/test_lex.py
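To make the failure mode concrete, here is a minimal, self-contained sketch; it is not the actual hy source — only the constructor signature and the `source.split("\n")` call are taken from the patch and traceback above, the rest is illustrative. The merged fix simply threads the token buffer into the exception so `__str__` never sees `source=None`; the extra `None` guard below is only there so the demo runs both ways.

```python
# Hypothetical stand-in for hy.lex.exceptions.LexException, reduced to the crash path.
class LexException(Exception):
    def __init__(self, message, lineno, colno, source=None):
        super(LexException, self).__init__(message)
        self.message = message
        self.lineno = lineno
        self.colno = colno
        self.source = source  # before the fix, tokenize() never handed the buffer over

    def __str__(self):
        # The real __str__ splits self.source to show the offending line; with
        # source left as None, that split is where the AttributeError came from.
        if self.source is None:
            return self.message
        line = self.source.split("\n")[self.lineno - 1]
        return "%s\n  %s\n  %s^" % (self.message, line, " " * (self.colno - 1))


print(LexException("Could not identify the next token.", 1, 2, "' "))  # with buffer
print(LexException("Could not identify the next token.", 1, 2))        # without buffer
```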
index 96918883..514696b8 100644
--- a/tests/test_lex.py
+++ b/tests/test_lex.py
@@ -63,6 +63,17 @@ def test_unbalanced_exception():
pass
+def test_lex_single_quote_err():
+ "Ensure tokenizing \"' \" throws a LexException that can be stringified"
+ # https://github.com/hylang/hy/issues/1252
+ try:
+ tokenize("' ")
+ except LexException as e:
+ assert "Could not identify the next token" in str(e)
+ else:
+ assert False
+
+
def test_lex_expression_symbols():
""" Make sure that expressions produce symbols """
objs = tokenize("(foo bar)")
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 3
} | 0.12 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"tox",
"pytest"
],
"pre_install": [],
"python": "3.6",
"reqs_path": [
"requirements-dev.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | alabaster==0.7.13
appdirs==1.4.4
args==0.1.0
astor==0.8.1
attrs==22.2.0
Babel==2.11.0
certifi==2021.5.30
charset-normalizer==2.0.12
clint==0.5.1
coverage==6.2
distlib==0.3.9
docutils==0.17.1
filelock==3.4.1
flake8==5.0.4
-e git+https://github.com/hylang/hy.git@bb9f543246dfa519e03bb3b315a850973020c503#egg=hy
idna==3.10
imagesize==1.4.1
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
Jinja2==3.0.3
MarkupSafe==2.0.1
mccabe==0.7.0
nose==1.3.7
packaging==21.3
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
pytz==2025.2
requests==2.27.1
rply==0.7.8
six==1.17.0
snowballstemmer==2.2.0
Sphinx==4.3.2
sphinx-rtd-theme==1.3.0
sphinxcontrib-applehelp==1.0.2
sphinxcontrib-devhelp==1.0.2
sphinxcontrib-htmlhelp==2.0.0
sphinxcontrib-jquery==4.1
sphinxcontrib-jsmath==1.0.1
sphinxcontrib-qthelp==1.0.3
sphinxcontrib-serializinghtml==1.1.5
toml==0.10.2
tomli==1.2.3
tox==3.28.0
typing_extensions==4.1.1
urllib3==1.26.20
virtualenv==20.16.2
zipp==3.6.0
| name: hy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- alabaster==0.7.13
- appdirs==1.4.4
- args==0.1.0
- astor==0.8.1
- attrs==22.2.0
- babel==2.11.0
- charset-normalizer==2.0.12
- clint==0.5.1
- coverage==6.2
- distlib==0.3.9
- docutils==0.17.1
- filelock==3.4.1
- flake8==5.0.4
- idna==3.10
- imagesize==1.4.1
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jinja2==3.0.3
- markupsafe==2.0.1
- mccabe==0.7.0
- nose==1.3.7
- packaging==21.3
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytz==2025.2
- requests==2.27.1
- rply==0.7.8
- six==1.17.0
- snowballstemmer==2.2.0
- sphinx==4.3.2
- sphinx-rtd-theme==1.3.0
- sphinxcontrib-applehelp==1.0.2
- sphinxcontrib-devhelp==1.0.2
- sphinxcontrib-htmlhelp==2.0.0
- sphinxcontrib-jquery==4.1
- sphinxcontrib-jsmath==1.0.1
- sphinxcontrib-qthelp==1.0.3
- sphinxcontrib-serializinghtml==1.1.5
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- virtualenv==20.16.2
- zipp==3.6.0
prefix: /opt/conda/envs/hy
| [
"tests/test_lex.py::test_lex_single_quote_err"
]
| []
| [
"tests/test_lex.py::test_lex_exception",
"tests/test_lex.py::test_unbalanced_exception",
"tests/test_lex.py::test_lex_expression_symbols",
"tests/test_lex.py::test_lex_expression_strings",
"tests/test_lex.py::test_lex_expression_integer",
"tests/test_lex.py::test_lex_symbols",
"tests/test_lex.py::test_lex_strings",
"tests/test_lex.py::test_lex_integers",
"tests/test_lex.py::test_lex_fractions",
"tests/test_lex.py::test_lex_expression_float",
"tests/test_lex.py::test_lex_expression_complex",
"tests/test_lex.py::test_lex_digit_separators",
"tests/test_lex.py::test_lex_line_counting",
"tests/test_lex.py::test_lex_line_counting_multi",
"tests/test_lex.py::test_lex_line_counting_multi_inner",
"tests/test_lex.py::test_dicts",
"tests/test_lex.py::test_sets",
"tests/test_lex.py::test_nospace",
"tests/test_lex.py::test_escapes",
"tests/test_lex.py::test_unicode_escapes",
"tests/test_lex.py::test_hashbang",
"tests/test_lex.py::test_complex",
"tests/test_lex.py::test_reader_macro",
"tests/test_lex.py::test_lex_comment_382",
"tests/test_lex.py::test_lex_mangling_star",
"tests/test_lex.py::test_lex_mangling_hyphen",
"tests/test_lex.py::test_lex_mangling_qmark",
"tests/test_lex.py::test_lex_mangling_bang",
"tests/test_lex.py::test_unmangle",
"tests/test_lex.py::test_simple_cons",
"tests/test_lex.py::test_dotted_list",
"tests/test_lex.py::test_cons_list"
]
| []
| MIT License | 1,150 | [
"NEWS",
"hy/lex/__init__.py",
"hy/lex/exceptions.py"
]
| [
"NEWS",
"hy/lex/__init__.py",
"hy/lex/exceptions.py"
]
|
|
nipy__nipype-1937 | d68b929ae1f36f2704fa7cddd029b308acf8587a | 2017-04-05 00:47:13 | 14161a590a3166b5a9c0f4afd42ff1acf843a960 | diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py
index 45261c998..03442df20 100644
--- a/nipype/interfaces/utility/base.py
+++ b/nipype/interfaces/utility/base.py
@@ -99,13 +99,29 @@ class IdentityInterface(IOBase):
class MergeInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
axis = traits.Enum('vstack', 'hstack', usedefault=True,
desc='direction in which to merge, hstack requires same number of elements in each input')
- no_flatten = traits.Bool(False, usedefault=True, desc='append to outlist instead of extending in vstack mode')
+ no_flatten = traits.Bool(False, usedefault=True,
+ desc='append to outlist instead of extending in vstack mode')
+ ravel_inputs = traits.Bool(False, usedefault=True,
+ desc='ravel inputs when no_flatten is False')
class MergeOutputSpec(TraitedSpec):
out = traits.List(desc='Merged output')
+def _ravel(in_val):
+ if not isinstance(in_val, list):
+ return in_val
+ flat_list = []
+ for val in in_val:
+ raveled_val = _ravel(val)
+ if isinstance(raveled_val, list):
+ flat_list.extend(raveled_val)
+ else:
+ flat_list.append(raveled_val)
+ return flat_list
+
+
class Merge(IOBase):
"""Basic interface class to merge inputs into a single list
@@ -123,23 +139,34 @@ class Merge(IOBase):
>>> out.outputs.out
[1, 2, 5, 3]
- >>> merge = Merge() # Or Merge(1)
- >>> merge.inputs.in_lists = [1, [2, 5], 3]
+ >>> merge = Merge(1)
+ >>> merge.inputs.in1 = [1, [2, 5], 3]
+ >>> out = merge.run()
+ >>> out.outputs.out
+ [1, [2, 5], 3]
+
+ >>> merge = Merge(1)
+ >>> merge.inputs.in1 = [1, [2, 5], 3]
+ >>> merge.inputs.ravel_inputs = True
>>> out = merge.run()
>>> out.outputs.out
[1, 2, 5, 3]
+ >>> merge = Merge(1)
+ >>> merge.inputs.in1 = [1, [2, 5], 3]
+ >>> merge.inputs.no_flatten = True
+ >>> out = merge.run()
+ >>> out.outputs.out
+ [[1, [2, 5], 3]]
"""
input_spec = MergeInputSpec
output_spec = MergeOutputSpec
- def __init__(self, numinputs=1, **inputs):
+ def __init__(self, numinputs=0, **inputs):
super(Merge, self).__init__(**inputs)
self._numinputs = numinputs
- if numinputs > 1:
+ if numinputs >= 1:
input_names = ['in%d' % (i + 1) for i in range(numinputs)]
- elif numinputs == 1:
- input_names = ['in_lists']
else:
input_names = []
add_traits(self.inputs, input_names)
@@ -150,8 +177,6 @@ class Merge(IOBase):
if self._numinputs < 1:
return outputs
- elif self._numinputs == 1:
- values = self.inputs.in_lists
else:
getval = lambda idx: getattr(self.inputs, 'in%d' % (idx + 1))
values = [getval(idx) for idx in range(self._numinputs)
@@ -160,7 +185,8 @@ class Merge(IOBase):
if self.inputs.axis == 'vstack':
for value in values:
if isinstance(value, list) and not self.inputs.no_flatten:
- out.extend(value)
+ out.extend(_ravel(value) if self.inputs.ravel_inputs else
+ value)
else:
out.append(value)
else:
| New behaviour of Merge interface breaks generic cases where numinputs=1 at runtime
### Summary
In the current HEAD, the new behaviour of the Merge interface when `numinputs==1` breaks cases (such as mine) where the `numinputs` is set at runtime depending on parameters of the workflow creation code. A better way to handle this would be to see if `in_lists` is defined or perhaps use `numinputs`==None.
### Actual behavior
`TypeError: '_Undefined' object is not iterable`
### Expected behavior
`in1` is taken to be the only element of the merged list
### How to replicate the behavior
```
import nipype.pipeline.engine as pe
from nipype.interfaces.utility.base import Merge
merge = pe.Node(Merge(numinputs=1), name='merge')
merge.inputs.in1 = 1
workflow = pe.Workflow(name='workflow')
workflow.add_nodes([merge])
workflow.run()
```
### Platform details:
please paste the output of: `python -c "import nipype; print(nipype.get_info()); print(nipype.__version__)"`
{'nibabel_version': '2.0.1', 'sys_executable': '/usr/local/opt/python/bin/python2.7', 'networkx_version': '1.11', 'numpy_version': '1.12.0', 'sys_platform': 'darwin', 'sys_version': '2.7.13 (default, Dec 17 2016, 23:03:43) \n[GCC 4.2.1 Compatible Apple LLVM 8.0.0 (clang-800.0.42.1)]', 'commit_source': u'repository', 'commit_hash': '368e184', 'pkg_path': '/Users/tclose/git/nipype/nipype', 'nipype_version': u'0.13.0-g368e184.dev', 'traits_version': '4.6.0', 'scipy_version': '0.14.0'}
0.13.0-g368e184.dev | nipy/nipype | diff --git a/nipype/interfaces/utility/tests/test_base.py b/nipype/interfaces/utility/tests/test_base.py
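The merged fix above removes the `in_lists` special case, makes `numinputs` default to 0, and adds an opt-in `ravel_inputs` flag backed by a recursive `_ravel` helper. Below is a standalone copy of that helper (renamed, no nipype import needed) so the flattening behaviour can be checked in isolation:

```python
# Standalone version of the `_ravel` helper added in the patch; pure Python, no nipype needed.
def ravel(in_val):
    if not isinstance(in_val, list):
        return in_val
    flat_list = []
    for val in in_val:
        raveled_val = ravel(val)
        if isinstance(raveled_val, list):
            flat_list.extend(raveled_val)
        else:
            flat_list.append(raveled_val)
    return flat_list


print(ravel([1, [2, 5], 3]))         # [1, 2, 5, 3] -- what ravel_inputs=True yields
print(ravel([0, [1, [2, [3, 4]]]]))  # [0, 1, 2, 3, 4]
print(ravel(7))                      # 7 -- non-list inputs pass through unchanged
```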
index 68f4bed44..3d2fbd2b5 100644
--- a/nipype/interfaces/utility/tests/test_base.py
+++ b/nipype/interfaces/utility/tests/test_base.py
@@ -56,25 +56,20 @@ def test_split(tmpdir, args, expected):
([3], {}, [0, [1, 2], [3, 4, 5]], [0, 1, 2, 3, 4, 5]),
([0], {}, None, None),
([], {}, [], []),
- ([], {}, [0, [1, 2], [3, 4, 5]], [0, 1, 2, 3, 4, 5]),
+ ([], {}, [0, [1, 2], [3, 4, 5]], [0, [1, 2], [3, 4, 5]]),
([3], {'axis': 'hstack'}, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]),
([3], {'axis': 'hstack'}, [[0, 1], [2, 3], [4, 5]],
[[0, 2, 4], [1, 3, 5]]),
([3], {'axis': 'hstack'}, [[0, 1], [2, 3], [4, 5]],
[[0, 2, 4], [1, 3, 5]]),
- ([1], {'axis': 'hstack'}, [[0], [1, 2], [3, 4, 5]], [[0, 1, 3]]),
- ([1], {'axis': 'hstack'}, [[0, 1], [2, 3], [4, 5]],
- [[0, 2, 4], [1, 3, 5]]),
])
def test_merge(tmpdir, args, kwargs, in_lists, expected):
os.chdir(str(tmpdir))
node = pe.Node(utility.Merge(*args, **kwargs), name='merge')
- numinputs = args[0] if args else 1
- if numinputs == 1:
- node.inputs.in_lists = in_lists
- elif numinputs > 1:
+ numinputs = args[0] if args else 0
+ if numinputs >= 1:
for i in range(1, numinputs + 1):
setattr(node.inputs, 'in{:d}'.format(i), in_lists[i - 1])
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 2
},
"num_modified_files": 1
} | 0.13 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
click==8.0.4
configparser==5.2.0
coverage==6.2
decorator==4.4.2
funcsigs==1.0.2
future==1.0.0
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.6.1
lxml==5.3.1
networkx==2.5.1
nibabel==3.2.2
-e git+https://github.com/nipy/nipype.git@d68b929ae1f36f2704fa7cddd029b308acf8587a#egg=nipype
numpy==1.19.5
packaging==21.3
pluggy==1.0.0
prov==2.0.1
psutil==7.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
rdflib==5.0.0
scipy==1.5.4
simplejson==3.20.1
six==1.17.0
tomli==1.2.3
traits==6.4.1
typing_extensions==4.1.1
zipp==3.6.0
| name: nipype
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- click==8.0.4
- configparser==5.2.0
- coverage==6.2
- decorator==4.4.2
- funcsigs==1.0.2
- future==1.0.0
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.6.1
- lxml==5.3.1
- networkx==2.5.1
- nibabel==3.2.2
- numpy==1.19.5
- packaging==21.3
- pluggy==1.0.0
- prov==2.0.1
- psutil==7.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- rdflib==5.0.0
- scipy==1.5.4
- simplejson==3.20.1
- six==1.17.0
- tomli==1.2.3
- traits==6.4.1
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/nipype
| [
"nipype/interfaces/utility/tests/test_base.py::test_merge[args2-kwargs2-in_lists2-expected2]",
"nipype/interfaces/utility/tests/test_base.py::test_merge[args3-kwargs3-in_lists3-expected3]"
]
| []
| [
"nipype/interfaces/utility/tests/test_base.py::test_rename",
"nipype/interfaces/utility/tests/test_base.py::test_split[args0-expected0]",
"nipype/interfaces/utility/tests/test_base.py::test_split[args1-expected1]",
"nipype/interfaces/utility/tests/test_base.py::test_merge[args0-kwargs0-in_lists0-expected0]",
"nipype/interfaces/utility/tests/test_base.py::test_merge[args1-kwargs1-None-None]",
"nipype/interfaces/utility/tests/test_base.py::test_merge[args4-kwargs4-in_lists4-expected4]",
"nipype/interfaces/utility/tests/test_base.py::test_merge[args5-kwargs5-in_lists5-expected5]",
"nipype/interfaces/utility/tests/test_base.py::test_merge[args6-kwargs6-in_lists6-expected6]"
]
| []
| Apache License 2.0 | 1,151 | [
"nipype/interfaces/utility/base.py"
]
| [
"nipype/interfaces/utility/base.py"
]
|
|
EliCDavis__AssociationEngine-52 | f0df6c26fad05a818edfebaafb81b9342919cf9e | 2017-04-05 02:08:48 | f0df6c26fad05a818edfebaafb81b9342919cf9e | coveralls:
[Coverage Status](https://coveralls.io/builds/10934644)
Coverage increased (+0.2%) to 91.121% when pulling **e42eb4bffb2195b8b5481e54eb43b4994126606b on snapper-manager-wrappers** into **f0df6c26fad05a818edfebaafb81b9342919cf9e on master**.
| diff --git a/Snapper/Manager.py b/Snapper/Manager.py
index cc9b635..e503ecf 100644
--- a/Snapper/Manager.py
+++ b/Snapper/Manager.py
@@ -61,6 +61,22 @@ class Manager:
"""
return self.matrix
+ def get_value_matrix(self):
+ """
+ Returns the underlying matrix on demand.
+
+ :return:
+ """
+ return self.matrix.get_value_matrix()
+
+ def get_relationships_by_value_range(self, minvalue, maxvalue):
+ """
+ Returns the underlying matrix on demand.
+
+ :return:
+ """
+ return self.matrix.get_relationships_by_value_range(minvalue, maxvalue)
+
def on_data(self, snapshot):
"""
Routes all data from incoming snapshot to the appropriate variables.
| Create Wrapper Methods for Matrix in Manager
Need to implement wrappers for Matrix methods:
- [x] get_value_matrix
- [x] get_relationships_by_value_range
This needs to be done to avoid the end user needing to acquire an instance of AssociationMatrix and operate on it directly. Instead, the end user should only ever interact directly with an instance of Manager. | EliCDavis/AssociationEngine | diff --git a/Tests/Manager_test.py b/Tests/Manager_test.py
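A minimal sketch of the delegation this checklist describes is below. Only the two wrapper signatures come from the patch above; the constructor argument and the `FakeMatrix` stand-in are invented here purely to keep the example runnable (the real `Manager` builds its own matrix internally).

```python
# FakeMatrix mimics just enough of AssociationMatrix for the demo; it is not the real class.
class FakeMatrix:
    def get_value_matrix(self):
        return [[1.0, 0.4], [0.4, 1.0]]

    def get_relationships_by_value_range(self, minvalue, maxvalue):
        pairs = [("sensor1", "sensor2", 0.4)]
        return [p for p in pairs if minvalue <= p[2] <= maxvalue]


class Manager:
    def __init__(self, matrix):  # simplified; illustrative only
        self.matrix = matrix

    # Thin wrappers: callers talk to Manager and never touch the matrix directly.
    def get_value_matrix(self):
        return self.matrix.get_value_matrix()

    def get_relationships_by_value_range(self, minvalue, maxvalue):
        return self.matrix.get_relationships_by_value_range(minvalue, maxvalue)


manager = Manager(FakeMatrix())
print(manager.get_value_matrix())
print(manager.get_relationships_by_value_range(0.3, 1.0))
```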
index 66d5d57..4a48dc5 100644
--- a/Tests/Manager_test.py
+++ b/Tests/Manager_test.py
@@ -75,3 +75,23 @@ def test_push_snapshot():
variable1.on_data.assert_called_with(1)
variable2.on_data.assert_called_with(2)
+
+
+def test_get_value_matrix():
+ manager = Manager()
+
+ manager.matrix.get_value_matrix = MagicMock()
+
+ manager.get_value_matrix()
+
+ manager.matrix.get_value_matrix.assert_called_with()
+
+
+def test_get_relationships_by_value_range():
+ manager = Manager()
+
+ manager.matrix.get_relationships_by_value_range = MagicMock()
+
+ manager.get_relationships_by_value_range(0.5, 1)
+
+ manager.matrix.get_relationships_by_value_range.assert_called_with(0.5, 1)
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 1
} | unknown | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "scipy numpy",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | -e git+https://github.com/EliCDavis/AssociationEngine.git@f0df6c26fad05a818edfebaafb81b9342919cf9e#egg=association_engine
exceptiongroup==1.2.2
iniconfig==2.1.0
numpy @ file:///croot/numpy_and_numpy_base_1736283260865/work/dist/numpy-2.0.2-cp39-cp39-linux_x86_64.whl#sha256=3387e3e62932fa288bc18e8f445ce19e998b418a65ed2064dd40a054f976a6c7
packaging==24.2
pluggy==1.5.0
pytest==8.3.5
scipy @ file:///croot/scipy_1733756309941/work/dist/scipy-1.13.1-cp39-cp39-linux_x86_64.whl#sha256=3b247b926209f2d9f719ebae39faf3ff891b2596150ed8f8349adfc3eb19441c
tomli==2.2.1
| name: AssociationEngine
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- blas=1.0=openblas
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgfortran-ng=11.2.0=h00389a5_1
- libgfortran5=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libopenblas=0.3.21=h043d6bf_0
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- numpy=2.0.2=py39heeff2f4_0
- numpy-base=2.0.2=py39h8a23956_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- pybind11-abi=4=hd3eb1b0_1
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- scipy=1.13.1=py39heeff2f4_1
- setuptools=72.1.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/AssociationEngine
| [
"Tests/Manager_test.py::test_get_value_matrix",
"Tests/Manager_test.py::test_get_relationships_by_value_range"
]
| []
| [
"Tests/Manager_test.py::test_should_initialize_snapper_and_matrix",
"Tests/Manager_test.py::test_add_and_remove_sensor_variable_pair",
"Tests/Manager_test.py::test_add_multiple_sensors",
"Tests/Manager_test.py::test_return_matrix",
"Tests/Manager_test.py::test_push_snapshot"
]
| []
| null | 1,152 | [
"Snapper/Manager.py"
]
| [
"Snapper/Manager.py"
]
|
jboss-dockerfiles__dogen-101 | 472aa64387e01f6fa7bc529fd8b9f5fc12b5dee7 | 2017-04-05 14:20:24 | bc9263c5b683fdf901ac1646286ee3908b85dcdc | diff --git a/dogen/templates/template.jinja b/dogen/templates/template.jinja
index 84e3c4c..170fd59 100644
--- a/dogen/templates/template.jinja
+++ b/dogen/templates/template.jinja
@@ -63,9 +63,10 @@ COPY repos/*.repo /etc/yum.repos.d/
COPY *.rpm /tmp/rpms/
{% endif %}
-# Install required RPMs
+# Install required RPMs and ensure that the packages were installed
RUN yum install -y {% if additional_repos -%} --disablerepo=\* {%- for repo in additional_repos %} --enablerepo={{ repo }}{% endfor %}{% endif %} {%- for package in packages %} {{ package }}{% endfor %} {%- for rpm in rpms %} /tmp/rpms/{{ rpm }} {% endfor %} \
- && yum clean all
+ && yum clean all && \
+ rpm -q {% for package in packages %} {{ package }}{% endfor %}
{% if additional_repos %}
# Remove custom repo files
| force a failure if a package installation fails
Yum has some annoying behaviour: if you perform `yum install foo bar baz` and at least one package exists/succeeds, it will return success. This masks some error conditions: a missing package, possibly missing repo files, etc.
We should generate some additional commands to ensure that we bail out if a package does not exist or fails to install. | jboss-dockerfiles/dogen
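The template change above addresses this by chaining an `rpm -q` query after the install, so the image build aborts when any requested package is absent. A rough, self-contained illustration rendered with a cut-down Jinja2 template (the template string and package names here are simplified stand-ins, not dogen's real template):

```python
# Requires jinja2, which dogen already depends on. `rpm -q` returns non-zero for any
# package that is not installed, which fails the chained RUN instruction.
from jinja2 import Template

snippet = Template(
    "RUN yum install -y{% for package in packages %} {{ package }}{% endfor %} "
    "&& yum clean all && \\\n"
    "    rpm -q{% for package in packages %} {{ package }}{% endfor %}"
)
print(snippet.render(packages=["wget", "curl"]))
# RUN yum install -y wget curl && yum clean all && \
#     rpm -q wget curl
```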
new file mode 100644
index 0000000..b742e65
--- /dev/null
+++ b/tests/test_package.py
@@ -0,0 +1,50 @@
+import argparse
+import mock
+import os
+import tempfile
+import unittest
+import shutil
+import re
+import sys
+
+from dogen.plugins.repo import Repo
+from dogen.generator import Generator
+
+class TestPackage(unittest.TestCase):
+ def setUp(self):
+ self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
+ self.descriptor = tempfile.NamedTemporaryFile(delete=False)
+ self.target_dir = os.path.join(self.workdir, "target")
+ self.log = mock.Mock()
+
+ def teardown(self):
+ shutil.rmtree(self.workdir)
+
+ def write_config(self, config):
+ with self.descriptor as f:
+ f.write(config.encode())
+
+ def prepare_dogen(self, repo_files_dir=None):
+ args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
+ template=None, scripts_path=None, additional_script=None,
+ skip_ssl_verification=None, repo_files_dir=repo_files_dir)
+ self.dogen = Generator(self.log, args, [Repo])
+
+ def test_custom_repo_files_should_add_two(self):
+ open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
+ open(os.path.join(self.workdir, "test.repo"), 'a').close()
+
+ self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
+ self.prepare_dogen(self.workdir)
+ self.dogen.run()
+
+ self.assertIsNotNone(self.dogen.cfg)
+ self.assertIsNotNone(self.dogen.cfg.get('packages'))
+ self.assertIsInstance(self.dogen.cfg.get('packages'), list)
+ self.assertIn("wget", self.dogen.cfg.get('packages'))
+
+ dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
+
+ sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
+ self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
+ self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | certifi==2025.1.31
charset-normalizer==3.4.1
coverage==7.8.0
docopt==0.6.2
-e git+https://github.com/jboss-dockerfiles/dogen.git@472aa64387e01f6fa7bc529fd8b9f5fc12b5dee7#egg=dogen
exceptiongroup==1.2.2
idna==3.10
iniconfig==2.1.0
Jinja2==3.1.6
MarkupSafe==3.0.2
mock==5.2.0
packaging==24.2
pluggy==1.5.0
pykwalify==1.8.0
pytest==8.3.5
pytest-cov==6.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.2
requests==2.32.3
ruamel.yaml==0.18.10
ruamel.yaml.clib==0.2.12
six==1.17.0
tomli==2.2.1
urllib3==2.3.0
| name: dogen
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- certifi==2025.1.31
- charset-normalizer==3.4.1
- coverage==7.8.0
- docopt==0.6.2
- exceptiongroup==1.2.2
- idna==3.10
- iniconfig==2.1.0
- jinja2==3.1.6
- markupsafe==3.0.2
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pykwalify==1.8.0
- pytest==8.3.5
- pytest-cov==6.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.2
- requests==2.32.3
- ruamel-yaml==0.18.10
- ruamel-yaml-clib==0.2.12
- six==1.17.0
- tomli==2.2.1
- urllib3==2.3.0
prefix: /opt/conda/envs/dogen
| [
"tests/test_package.py::TestPackage::test_custom_repo_files_should_add_two"
]
| []
| []
| []
| MIT License | 1,153 | [
"dogen/templates/template.jinja"
]
| [
"dogen/templates/template.jinja"
]
|
|
Azure__azure-cli-2773 | 6186044564dc3436b7551c58eda07db4412a49f8 | 2017-04-05 18:24:43 | eb12ac454cbe1ddb59c86cdf2045e1912660e750 | codecov-io: # [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=h1) Report
> Merging [#2773](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=desc) into [master](https://codecov.io/gh/Azure/azure-cli/commit/8dda58ca3c3c1af427df59846384b6b7302acc07?src=pr&el=desc) will **increase** coverage by `0.02%`.
> The diff coverage is `100%`.
[Impacted file tree graph](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #2773 +/- ##
==========================================
+ Coverage 62.83% 62.86% +0.02%
==========================================
Files 480 480
Lines 25783 25789 +6
Branches 3904 3904
==========================================
+ Hits 16201 16211 +10
+ Misses 8574 8566 -8
- Partials 1008 1012 +4
```
| [Impacted Files](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [...source/azure/cli/command\_modules/resource/\_help.py](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktcmVzb3VyY2UvYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy9yZXNvdXJjZS9faGVscC5weQ==) | `100% <ø> (ø)` | :arrow_up: |
| [...azure-cli-vm/azure/cli/command\_modules/vm/\_help.py](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktdm0vYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy92bS9faGVscC5weQ==) | `100% <100%> (ø)` | :arrow_up: |
| [...zure-cli-vm/azure/cli/command\_modules/vm/custom.py](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktdm0vYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy92bS9jdXN0b20ucHk=) | `72.99% <100%> (+0.42%)` | :arrow_up: |
| [src/azure-cli-core/azure/cli/core/util.py](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree#diff-c3JjL2F6dXJlLWNsaS1jb3JlL2F6dXJlL2NsaS9jb3JlL3V0aWwucHk=) | `68.99% <0%> (ø)` | :arrow_up: |
| [...dback/azure/cli/command\_modules/feedback/custom.py](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktZmVlZGJhY2svYXp1cmUvY2xpL2NvbW1hbmRfbW9kdWxlcy9mZWVkYmFjay9jdXN0b20ucHk=) | `34.69% <0%> (ø)` | :arrow_up: |
| [...-cli-role/azure/cli/command\_modules/role/custom.py](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=tree#diff-c3JjL2NvbW1hbmRfbW9kdWxlcy9henVyZS1jbGktcm9sZS9henVyZS9jbGkvY29tbWFuZF9tb2R1bGVzL3JvbGUvY3VzdG9tLnB5) | `19.28% <0%> (ø)` | :arrow_up: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=footer). Last update [8dda58c...8aacf90](https://codecov.io/gh/Azure/azure-cli/pull/2773?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). | diff --git a/azure-cli.pyproj b/azure-cli.pyproj
index bea939442..329dab752 100644
--- a/azure-cli.pyproj
+++ b/azure-cli.pyproj
@@ -600,6 +600,7 @@
<Folder Include="azure-cli-core\azure\cli\core\test_utils\" />
<Folder Include="azure-cli-core\tests\" />
<Folder Include="azure-cli-core\tests\__pycache__\" />
+ <Folder Include="azure-cli-nspkg\" />
<Folder Include="azure-cli-testsdk\" />
<Folder Include="azure-cli-testsdk\azure\" />
<Folder Include="azure-cli-testsdk\azure\cli\" />
@@ -788,17 +789,25 @@
<Folder Include="command_modules\azure-cli-dls\" />
</ItemGroup>
<ItemGroup>
+ <Content Include="azure-cli-core\HISTORY.rst" />
<Content Include="azure-cli-core\setup.cfg" />
+ <Content Include="azure-cli-nspkg\HISTORY.rst" />
+ <Content Include="azure-cli-testsdk\HISTORY.rst" />
<Content Include="azure-cli\az.completion.sh" />
+ <Content Include="azure-cli\HISTORY.rst" />
<Content Include="azure-cli\setup.cfg" />
<Content Include="command_modules\azure-cli-acr\azure\cli\command_modules\acr\template.json" />
+ <Content Include="command_modules\azure-cli-acr\HISTORY.rst" />
<Content Include="command_modules\azure-cli-acs\azure\cli\command_modules\acs\mgmt_acs\azuredeploy.json" />
<Content Include="command_modules\azure-cli-acs\azure\cli\command_modules\acs\mgmt_acs\swagger_create_acs.json" />
+ <Content Include="command_modules\azure-cli-acs\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-appservice\HISTORY.rst" />
<Content Include="command_modules\azure-cli-appservice\README.rst" />
<Content Include="command_modules\azure-cli-appservice\setup.cfg" />
<Content Include="command_modules\azure-cli-appservice\tests\sample_web\.gitignore" />
<Content Include="command_modules\azure-cli-appservice\tests\sample_web\package.json" />
<Content Include="command_modules\azure-cli-appservice\tests\sample_web\server.js" />
+ <Content Include="command_modules\azure-cli-batch\HISTORY.rst" />
<Content Include="command_modules\azure-cli-batch\tests\data\batch-pool-create-invalid.json" />
<Content Include="command_modules\azure-cli-batch\tests\data\batch-pool-create.json" />
<Content Include="command_modules\azure-cli-batch\tests\data\batch-pool-update.json" />
@@ -812,24 +821,35 @@
<Content Include="command_modules\azure-cli-batch\tests\data\batchtest.cer" />
<Content Include="command_modules\azure-cli-batch\tests\data\batchUpdatePool.json" />
<Content Include="command_modules\azure-cli-batch\tests\README.md" />
+ <Content Include="command_modules\azure-cli-cloud\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-component\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-configure\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-container\HISTORY.rst" />
<Content Include="command_modules\azure-cli-dla\HISTORY.rst" />
<Content Include="command_modules\azure-cli-dla\MANIFEST.in" />
<Content Include="command_modules\azure-cli-dla\README.rst" />
<Content Include="command_modules\azure-cli-dls\HISTORY.rst" />
<Content Include="command_modules\azure-cli-dls\MANIFEST.in" />
<Content Include="command_modules\azure-cli-dls\README.rst" />
+ <Content Include="command_modules\azure-cli-documentdb\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-feedback\HISTORY.rst" />
<Content Include="command_modules\azure-cli-find\HISTORY.rst" />
<Content Include="command_modules\azure-cli-find\MANIFEST.in" />
<Content Include="command_modules\azure-cli-find\README.rst" />
<Content Include="command_modules\azure-cli-find\setup.cfg" />
<Content Include="command_modules\azure-cli-iot\azure\cli\command_modules\iot\mgmt_iot_hub_device\swagger_iot_hub_device_identity.json" />
+ <Content Include="command_modules\azure-cli-iot\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-keyvault\HISTORY.rst" />
<Content Include="command_modules\azure-cli-keyvault\tests\policy.json" />
<Content Include="command_modules\azure-cli-keyvault\tests\policy2.json" />
<Content Include="command_modules\azure-cli-keyvault\tests\policy_import_pem.json" />
<Content Include="command_modules\azure-cli-keyvault\tests\policy_import_pfx.json" />
<Content Include="command_modules\azure-cli-keyvault\tests\policy_pending.json" />
<Content Include="command_modules\azure-cli-keyvault\tests\test_secret.txt" />
+ <Content Include="command_modules\azure-cli-lab\HISTORY.rst" />
<Content Include="command_modules\azure-cli-monitor\azure\cli\command_modules\monitor\autoscale-parameters-template.json" />
+ <Content Include="command_modules\azure-cli-monitor\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-network\HISTORY.rst" />
<Content Include="command_modules\azure-cli-network\tests\zone_files\fail1.txt" />
<Content Include="command_modules\azure-cli-network\tests\zone_files\fail2.txt" />
<Content Include="command_modules\azure-cli-network\tests\zone_files\fail3.txt" />
@@ -840,15 +860,23 @@
<Content Include="command_modules\azure-cli-network\tests\zone_files\zone3.txt" />
<Content Include="command_modules\azure-cli-network\tests\zone_files\zone4.txt" />
<Content Include="command_modules\azure-cli-network\tests\zone_files\zone5.txt" />
+ <Content Include="command_modules\azure-cli-profile\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-redis\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-resource\HISTORY.rst" />
<Content Include="command_modules\azure-cli-resource\tests\sample_policy_rule.json" />
<Content Include="command_modules\azure-cli-resource\tests\simple_deploy.json" />
<Content Include="command_modules\azure-cli-resource\tests\simple_deploy_parameters.json" />
+ <Content Include="command_modules\azure-cli-role\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-sql\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-storage\HISTORY.rst" />
<Content Include="command_modules\azure-cli-storage\tests\Readme.md" />
<Content Include="command_modules\azure-cli-storage\tests\scripts\purge_test_sample.sh" />
<Content Include="command_modules\azure-cli-storage\tests\scripts\run_integration.sh" />
<Content Include="command_modules\azure-cli-storage\tests\scripts\set_test_env.sh" />
<Content Include="command_modules\azure-cli-storage\tests\testfile.rst" />
<Content Include="command_modules\azure-cli-storage\tests\testpage.rst" />
+ <Content Include="command_modules\azure-cli-taskhelp\HISTORY.rst" />
+ <Content Include="command_modules\azure-cli-vm\HISTORY.rst" />
<Content Include="command_modules\azure-cli-vm\tests\aliases.json" />
<Content Include="command_modules\azure-cli-vm\tests\keyvault\policy.json" />
<Content Include="command_modules\azure-cli-vm\tests\sample-public.json" />
diff --git a/src/command_modules/azure-cli-acs/HISTORY.rst b/src/command_modules/azure-cli-acs/HISTORY.rst
index 787a4344c..553734f48 100644
--- a/src/command_modules/azure-cli-acs/HISTORY.rst
+++ b/src/command_modules/azure-cli-acs/HISTORY.rst
@@ -3,6 +3,11 @@
Release History
===============
+Unreleased
+++++++++++++++++++
+
+* remove windows profile before PUT call for scale command (#2755)
+
2.0.2 (2017-04-03)
++++++++++++++++++
diff --git a/src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/custom.py b/src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/custom.py
index d928b38ec..9c1bb80d1 100644
--- a/src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/custom.py
+++ b/src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/custom.py
@@ -764,6 +764,9 @@ def update_acs(client, resource_group_name, container_service_name, new_agent_co
if instance.orchestrator_profile.orchestrator_type == ContainerServiceOchestratorTypes.kubernetes:
instance.service_principal_profile = None
+ # null out the windows profile so that validation doesn't complain about not having the admin password
+ instance.windows_profile = None
+
return client.create_or_update(resource_group_name, container_service_name, instance)
diff --git a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
index bcff81d87..9709b10ea 100644
--- a/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
+++ b/src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py
@@ -5,7 +5,7 @@
from azure.cli.core.help_files import helps #pylint: disable=unused-import
-#pylint: disable=line-too-long
+#pylint: disable=line-too-long, too-many-lines
helps['lock'] = """
type: group
short-summary: Manage Azure locks.
diff --git a/src/command_modules/azure-cli-vm/HISTORY.rst b/src/command_modules/azure-cli-vm/HISTORY.rst
index 1d8f6777f..6894110ab 100644
--- a/src/command_modules/azure-cli-vm/HISTORY.rst
+++ b/src/command_modules/azure-cli-vm/HISTORY.rst
@@ -5,6 +5,7 @@ Release History
2.0.3 (unreleased)
++++++++++++++++++
* vm/vmss: support create from a market place image which requires plan info(#1209)
+* Fix bug with `vmss update` and `vm availability-set update`
2.0.2 (2017-04-03)
++++++++++++++++++
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_help.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_help.py
index 61046e4e2..abdcc5955 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_help.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_help.py
@@ -3,9 +3,9 @@
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
-from azure.cli.core.help_files import helps
+# pylint: disable=line-too-long, too-many-lines
-# pylint: disable=line-too-long
+from azure.cli.core.help_files import helps
image_long_summary = """ URN aliases: CentOS, CoreOS, Debian, openSUSE, RHEL, SLES, UbuntuLTS, Win2008R2SP1, Win2012Datacenter, Win2012R2Datacenter.
Example URN: MicrosoftWindowsServer:WindowsServer:2012-R2-Datacenter:latest
@@ -146,7 +146,7 @@ helps['vm availability-set create'] = """
long-summary: For more information, see https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-manage-availability.
examples:
- name: Create an availability set.
- text: az vm availability-set create -n MyAvSet -g MyResourceGroup
+ text: az vm availability-set create -n MyAvSet -g MyResourceGroup --platform-fault-domain-count 2 --platform-update-domain-count 2
"""
helps['vm availability-set update'] = """
@@ -239,11 +239,6 @@ helps['vm update'] = """
text: az <command> -n name -g group --remove networkProfile.networkInterfaces 3
""".format(generic_update_help)
-helps['vm show'] = """
- type: command
- short-summary: Get information about an Azure Virtual Machine.
-"""
-
helps['vmss get-instance-view'] = """
type: command
parameters:
@@ -258,6 +253,16 @@ helps['vmss reimage'] = """
short-summary: "One or more VM scale sets or specific VM instance IDs. If provided, no other 'Resource Id' arguments should be specified."
"""
+helps['vmss disk'] = """
+ type: group
+ short-summary: Manage the managed data disks associated with a virtual machine scale set.
+"""
+
+helps['vmss nic'] = """
+ type: group
+ short-summary: Manage the network interfaces associated with a virtual machine scale set.
+"""
+
helps['vmss show'] = """
type: command
parameters:
@@ -265,6 +270,16 @@ helps['vmss show'] = """
short-summary: "One or more VM scale sets or specific VM instance IDs. If provided, no other 'Resource Id' arguments should be specified."
"""
+helps['vmss update'] = """
+ type: command
+ short-summary: Update a virtual machine scale set.
+"""
+
+helps['vmss wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the scale set is met.
+"""
+
helps['vm convert'] = """
type: command
short-summary: Convert a VM with unmanaged disks to use managed disks.
@@ -445,7 +460,7 @@ disk_long_summary = """
helps['vm disk'] = """
type: group
- short-summary: Manage the data disks attached to a VM.
+ short-summary: Manage the managed data disks attached to a VM.
long-summary: >
{0}
""".format(disk_long_summary)
@@ -506,6 +521,11 @@ helps['vm disk attach'] = """
text: az vm disk attach -g MyResourceGroup --vm-name MyVm --disk disk_name --new
"""
+helps['vm encryption'] = """
+ type: group
+ short-summary: Manage encryption of VM disks.
+"""
+
helps['vm extension'] = """
type: group
short-summary: Extend the functionality of your VMs with extensions.
@@ -842,6 +862,7 @@ helps['vm redeploy'] = """
helps['vm resize'] = """
type: command
+ short-summary: Update VM size.
examples:
- name: Resize a VM.
text: az vm resize -g MyResourceGroup -n MyVm --size Standard_DS3_v2
@@ -858,6 +879,7 @@ helps['vm restart'] = """
helps['vm show'] = """
type: command
+ short-summary: Show details of a VM.
examples:
- name: Show information about a VM.
text: az vm show -g MyResourceGroup -n MyVm -d
diff --git a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
index bdadd1dc3..05ee1573d 100644
--- a/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
+++ b/src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py
@@ -1473,17 +1473,18 @@ def availset_get(resource_group_name, name):
return _compute_client_factory().availability_sets.get(resource_group_name, name)
-def availset_set(**kwargs):
- return _compute_client_factory().availability_sets.create_or_update(**kwargs)
+def availset_set(resource_group_name, name, **kwargs):
+ return _compute_client_factory().availability_sets.create_or_update(resource_group_name, name,
+ **kwargs)
def vmss_get(resource_group_name, name):
return _compute_client_factory().virtual_machine_scale_sets.get(resource_group_name, name)
-def vmss_set(no_wait=False, **kwargs):
+def vmss_set(resource_group_name, name, no_wait=False, **kwargs):
return _compute_client_factory().virtual_machine_scale_sets.create_or_update(
- raw=no_wait, **kwargs)
+ resource_group_name, name, raw=no_wait, **kwargs)
def convert_av_set_to_managed_disk(resource_group_name, availability_set_name):
| az vmss update fails when trying to update image url for VM scale set
I am trying to update the OS image used in my VMSS, and ran the following command, but it fails:
az vmss update -g myrg -n myvmss --set virtualMachineProfile.storageProfile.osDisk.image.uri=https://mystorage.blob.core.windows.net/system/Microsoft.Compute/Images/vsts-buildimagetask/Release-2-osDisk.6efa6e4e-d44c-4554-bee5-d32361220902.vhd
Error is:
create_or_update() takes at least 4 arguments (3 given)
Traceback (most recent call last):
File "/opt/az/local/lib/python2.7/site-packages/azure/cli/main.py", line 37, in main
cmd_result = APPLICATION.execute(args)
File "/opt/az/local/lib/python2.7/site-packages/azure/cli/core/application.py", line 157, in execute
result = expanded_arg.func(params)
File "/opt/az/local/lib/python2.7/site-packages/azure/cli/core/commands/arm.py", line 309, in handler
opres = setter(client, **setterargs) if client else setter(**setterargs)
File "/opt/az/local/lib/python2.7/site-packages/azure/cli/command_modules/vm/custom.py", line 1490, in vmss_set
raw=no_wait, **kwargs)
TypeError: create_or_update() takes at least 4 arguments (3 given)
log file gist: https://gist.githubusercontent.com/bishal-pdMSFT/ffd9aa642a43de16ed56a9f887734e51/raw/2c708c0e2c281401c05ca9dad277f23d68d707d6/AzCliVmssUpdateError
### Environment summary
**Install Method:** How did you install the CLI? (e.g. pip, interactive script, apt-get, Docker, MSI, nightly)
Answer here: apt-get
**CLI Version:** What version of the CLI and modules are installed? (Available with `az --version`)
Answer here: 2.0.2
**OS Version:** What OS and version are you using?
Answer here: Ubuntu 14.04
**Shell Type:** What shell are you using? (e.g. bash, cmd.exe, PowerShell)
Answer here: bash
### Description
| Azure/azure-cli | diff --git a/src/command_modules/azure-cli-vm/tests/recordings/test_vm_availset.yaml b/src/command_modules/azure-cli-vm/tests/recordings/test_vm_availset.yaml
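The `vmss_set`/`availset_set` change in the patch above is pure argument plumbing: the SDK's `create_or_update` needs the resource group and resource name explicitly, and forwarding everything through `**kwargs` left them out, hence the `TypeError` in the report. A stripped-down sketch of the before/after shape (the `create_or_update` below is a stand-in, not the Azure SDK method):

```python
# Stand-in for virtual_machine_scale_sets.create_or_update, kept only to show arity.
def create_or_update(resource_group_name, name, parameters, raw=False):
    return ("PUT", resource_group_name, name, parameters, raw)


def vmss_set_old(no_wait=False, **kwargs):
    # Before the fix: group and name are not declared, so they never reach the SDK call.
    return create_or_update(raw=no_wait, **kwargs)


def vmss_set_new(resource_group_name, name, no_wait=False, **kwargs):
    # After the fix (mirrors the patch): declared explicitly and passed through positionally.
    return create_or_update(resource_group_name, name, raw=no_wait, **kwargs)


print(vmss_set_new("myrg", "myvmss", parameters={"sku": "Standard_DS1_v2"}))
try:
    vmss_set_old(parameters={"sku": "Standard_DS1_v2"})
except TypeError as exc:
    print(exc)  # same class of failure as the reported create_or_update() TypeError
```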
index 390590a73..88f576df2 100644
--- a/src/command_modules/azure-cli-vm/tests/recordings/test_vm_availset.yaml
+++ b/src/command_modules/azure-cli-vm/tests/recordings/test_vm_availset.yaml
@@ -6,19 +6,19 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4e286f26-197b-11e7-95e7-a0b3ccf7272a]
+ x-ms-client-request-id: [3742f976-1a26-11e7-b428-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cliTestRg_Availset?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_avail_set?api-version=2016-09-01
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset","name":"cliTestRg_Availset","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set","name":"cli_test_avail_set","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:11:46 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:09 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -26,37 +26,37 @@ interactions:
content-length: ['226']
status: {code: 200, message: OK}
- request:
- body: '{"properties": {"parameters": {}, "template": {"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
- "outputs": {}, "variables": {}, "contentVersion": "1.0.0.0", "resources": [{"type":
- "Microsoft.Compute/availabilitySets", "tags": {}, "location": "westus", "sku":
- {"name": "Aligned"}, "properties": {"platformFaultDomainCount": 2, "platformUpdateDomainCount":
- 2}, "apiVersion": "2016-04-30-preview", "name": "availset-test"}], "parameters":
- {}}, "mode": "Incremental"}}'
+ body: '{"properties": {"parameters": {}, "mode": "Incremental", "template": {"parameters":
+ {}, "contentVersion": "1.0.0.0", "resources": [{"apiVersion": "2016-04-30-preview",
+ "name": "availset-test", "type": "Microsoft.Compute/availabilitySets", "tags":
+ {}, "properties": {"platformUpdateDomainCount": 2, "platformFaultDomainCount":
+ 2}, "sku": {"name": "Aligned"}, "location": "westus"}], "outputs": {}, "variables":
+ {}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#"}}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['509']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4e3ac158-197b-11e7-bbe5-a0b3ccf7272a]
+ x-ms-client-request-id: [375684e2-1a26-11e7-9b5e-a0b3ccf7272a]
method: PUT
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cliTestRg_Availset/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_avail_set/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Resources/deployments/av_set_deploy_Z45qIWH5DYFgYEvvrBrZWCCBvdlvRPfI","name":"av_set_deploy_Z45qIWH5DYFgYEvvrBrZWCCBvdlvRPfI","properties":{"templateHash":"11394658800043119878","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-04-04T21:11:47.8613677Z","duration":"PT0.5122453S","correlationId":"d1f30e5f-bc20-42c9-aba3-9cb4a438f725","providers":[{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"availabilitySets","locations":["westus"]}]}],"dependencies":[]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Resources/deployments/av_set_deploy_SPDoW3eXYMD22OhOalVEdpS9W5DHL3Nn","name":"av_set_deploy_SPDoW3eXYMD22OhOalVEdpS9W5DHL3Nn","properties":{"templateHash":"8041814817506538688","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-04-05T17:35:11.2753176Z","duration":"PT0.6118988S","correlationId":"18abcfb7-7f45-4334-b035-d18c53869277","providers":[{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"availabilitySets","locations":["westus"]}]}],"dependencies":[]}}'}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cliTestRg_Availset/providers/Microsoft.Resources/deployments/av_set_deploy_Z45qIWH5DYFgYEvvrBrZWCCBvdlvRPfI/operationStatuses/08587102665781284905?api-version=2016-09-01']
+ Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_avail_set/providers/Microsoft.Resources/deployments/av_set_deploy_SPDoW3eXYMD22OhOalVEdpS9W5DHL3Nn/operationStatuses/08587101931748141948?api-version=2016-09-01']
Cache-Control: [no-cache]
- Content-Length: ['633']
+ Content-Length: ['632']
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:11:47 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:10 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
status: {code: 201, message: Created}
- request:
body: null
@@ -65,19 +65,19 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4e3ac158-197b-11e7-bbe5-a0b3ccf7272a]
+ x-ms-client-request-id: [375684e2-1a26-11e7-9b5e-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cliTestRg_Availset/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587102665781284905?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_avail_set/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587101931748141948?api-version=2016-09-01
response:
body: {string: '{"status":"Succeeded"}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:17 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:41 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
@@ -91,24 +91,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4e3ac158-197b-11e7-bbe5-a0b3ccf7272a]
+ x-ms-client-request-id: [375684e2-1a26-11e7-9b5e-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cliTestRg_Availset/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_avail_set/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Resources/deployments/av_set_deploy_Z45qIWH5DYFgYEvvrBrZWCCBvdlvRPfI","name":"av_set_deploy_Z45qIWH5DYFgYEvvrBrZWCCBvdlvRPfI","properties":{"templateHash":"11394658800043119878","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-04-04T21:11:55.0818441Z","duration":"PT7.7327217S","correlationId":"d1f30e5f-bc20-42c9-aba3-9cb4a438f725","providers":[{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"availabilitySets","locations":["westus"]}]}],"dependencies":[],"outputs":{},"outputResources":[{"id":"Microsoft.Compute/availabilitySets/availset-test"}]}}'}
+ body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Resources/deployments/av_set_deploy_SPDoW3eXYMD22OhOalVEdpS9W5DHL3Nn","name":"av_set_deploy_SPDoW3eXYMD22OhOalVEdpS9W5DHL3Nn","properties":{"templateHash":"8041814817506538688","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-04-05T17:35:18.2863144Z","duration":"PT7.6228956S","correlationId":"18abcfb7-7f45-4334-b035-d18c53869277","providers":[{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"availabilitySets","locations":["westus"]}]}],"dependencies":[],"outputs":{},"outputResources":[{"id":"Microsoft.Compute/availabilitySets/availset-test"}]}}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:18 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:41 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Vary: [Accept-Encoding]
- content-length: ['725']
+ content-length: ['724']
status: {code: 200, message: OK}
- request:
body: null
@@ -117,24 +117,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [61eb12c2-197b-11e7-aa2f-a0b3ccf7272a]
+ x-ms-client-request-id: [4b061b0a-1a26-11e7-bd7b-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
response:
body: {string: "{\r\n \"properties\": {\r\n \"platformUpdateDomainCount\"\
: 2,\r\n \"platformFaultDomainCount\": 2,\r\n \"virtualMachines\": []\r\
\n },\r\n \"type\": \"Microsoft.Compute/availabilitySets\",\r\n \"location\"\
- : \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test\"\
+ : \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test\"\
,\r\n \"name\": \"availset-test\",\r\n \"sku\": {\r\n \"name\": \"Aligned\"\
\r\n }\r\n}"}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:19 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:42 GMT']
Expires: ['-1']
Pragma: [no-cache]
Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
@@ -150,32 +150,103 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [626d9c90-197b-11e7-a7bf-a0b3ccf7272a]
+ x-ms-client-request-id: [4b89e2a6-1a26-11e7-aff7-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets?api-version=2016-04-30-preview
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
+ response:
+ body: {string: "{\r\n \"properties\": {\r\n \"platformUpdateDomainCount\"\
+ : 2,\r\n \"platformFaultDomainCount\": 2,\r\n \"virtualMachines\": []\r\
+ \n },\r\n \"type\": \"Microsoft.Compute/availabilitySets\",\r\n \"location\"\
+ : \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test\"\
+ ,\r\n \"name\": \"availset-test\",\r\n \"sku\": {\r\n \"name\": \"Aligned\"\
+ \r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Wed, 05 Apr 2017 17:35:43 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['449']
+ status: {code: 200, message: OK}
+- request:
+ body: '{"properties": {"virtualMachines": [], "platformUpdateDomainCount": 2,
+ "platformFaultDomainCount": 2}, "sku": {"name": "Aligned"}, "location": "westus",
+ "tags": {"test": "success"}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['181']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [4bc632d2-1a26-11e7-9654-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
+ response:
+ body: {string: "{\r\n \"properties\": {\r\n \"platformUpdateDomainCount\"\
+ : 2,\r\n \"platformFaultDomainCount\": 2\r\n },\r\n \"type\": \"Microsoft.Compute/availabilitySets\"\
+ ,\r\n \"location\": \"westus\",\r\n \"tags\": {\r\n \"test\": \"success\"\
+ \r\n },\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test\"\
+ ,\r\n \"name\": \"availset-test\",\r\n \"sku\": {\r\n \"name\": \"Aligned\"\
+ \r\n }\r\n}"}
+ headers:
+ Cache-Control: [no-cache]
+ Content-Type: [application/json; charset=utf-8]
+ Date: ['Wed, 05 Apr 2017 17:35:44 GMT']
+ Expires: ['-1']
+ Pragma: [no-cache]
+ Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ Transfer-Encoding: [chunked]
+ Vary: [Accept-Encoding]
+ content-length: ['448']
+ x-ms-ratelimit-remaining-subscription-writes: ['1199']
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [4cac9c86-1a26-11e7-b434-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets?api-version=2016-04-30-preview
response:
body: {string: "{\r\n \"value\": [\r\n {\r\n \"properties\": {\r\n \
\ \"platformUpdateDomainCount\": 2,\r\n \"platformFaultDomainCount\"\
: 2,\r\n \"virtualMachines\": []\r\n },\r\n \"type\": \"\
Microsoft.Compute/availabilitySets\",\r\n \"location\": \"westus\",\r\
- \n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test\"\
+ \n \"tags\": {\r\n \"test\": \"success\"\r\n },\r\n \
+ \ \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test\"\
,\r\n \"name\": \"availset-test\",\r\n \"sku\": {\r\n \"\
name\": \"Aligned\"\r\n }\r\n }\r\n ]\r\n}"}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:20 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:45 GMT']
Expires: ['-1']
Pragma: [no-cache]
Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Transfer-Encoding: [chunked]
Vary: [Accept-Encoding]
- content-length: ['534']
+ content-length: ['569']
status: {code: 200, message: OK}
- request:
body: null
@@ -184,13 +255,13 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [62f493f6-197b-11e7-9f88-a0b3ccf7272a]
+ x-ms-client-request-id: [4d2ba6c6-1a26-11e7-ba66-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test/vmSizes?api-version=2016-04-30-preview
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test/vmSizes?api-version=2016-04-30-preview
response:
body: {string: "{\r\n \"value\": [\r\n {\r\n \"name\": \"Standard_DS1_v2\"\
,\r\n \"numberOfCores\": 1,\r\n \"osDiskSizeInMB\": 1047552,\r\n\
@@ -517,7 +588,7 @@ interactions:
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:21 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:45 GMT']
Expires: ['-1']
Pragma: [no-cache]
Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
@@ -533,31 +604,32 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [639e81dc-197b-11e7-85b0-a0b3ccf7272a]
+ x-ms-client-request-id: [4dca4c80-1a26-11e7-82f4-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
response:
body: {string: "{\r\n \"properties\": {\r\n \"platformUpdateDomainCount\"\
: 2,\r\n \"platformFaultDomainCount\": 2,\r\n \"virtualMachines\": []\r\
\n },\r\n \"type\": \"Microsoft.Compute/availabilitySets\",\r\n \"location\"\
- : \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test\"\
+ : \"westus\",\r\n \"tags\": {\r\n \"test\": \"success\"\r\n },\r\n \"\
+ id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test\"\
,\r\n \"name\": \"availset-test\",\r\n \"sku\": {\r\n \"name\": \"Aligned\"\
\r\n }\r\n}"}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:22 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:47 GMT']
Expires: ['-1']
Pragma: [no-cache]
Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
Transfer-Encoding: [chunked]
Vary: [Accept-Encoding]
- content-length: ['449']
+ content-length: ['476']
status: {code: 200, message: OK}
- request:
body: null
@@ -567,24 +639,24 @@ interactions:
Connection: [keep-alive]
Content-Length: ['0']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [6444777e-197b-11e7-9414-a0b3ccf7272a]
+ x-ms-client-request-id: [4e5daefa-1a26-11e7-ae0d-a0b3ccf7272a]
method: DELETE
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets/availset-test?api-version=2016-04-30-preview
response:
body: {string: ''}
headers:
Cache-Control: [no-cache]
Content-Length: ['0']
- Date: ['Tue, 04 Apr 2017 21:12:24 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:48 GMT']
Expires: ['-1']
Pragma: [no-cache]
Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ x-ms-ratelimit-remaining-subscription-writes: ['1197']
status: {code: 200, message: OK}
- request:
body: null
@@ -593,19 +665,19 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.13.0 msrest/0.4.7
+ User-Agent: [python/3.5.1 (Windows-10-10.0.14393-SP0) requests/2.9.1 msrest/0.4.7
msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [652fe500-197b-11e7-83aa-a0b3ccf7272a]
+ x-ms-client-request-id: [4f558e4c-1a26-11e7-8914-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cliTestRg_Availset/providers/Microsoft.Compute/availabilitySets?api-version=2016-04-30-preview
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_avail_set/providers/Microsoft.Compute/availabilitySets?api-version=2016-04-30-preview
response:
body: {string: '{"value":[]}'}
headers:
Cache-Control: [no-cache]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 04 Apr 2017 21:12:24 GMT']
+ Date: ['Wed, 05 Apr 2017 17:35:49 GMT']
Expires: ['-1']
Pragma: [no-cache]
Strict-Transport-Security: [max-age=31536000; includeSubDomains]
diff --git a/src/command_modules/azure-cli-vm/tests/recordings/test_vmss_create_none_options.yaml b/src/command_modules/azure-cli-vm/tests/recordings/test_vmss_create_none_options.yaml
index c1d002b79..7f4a2c0b3 100644
--- a/src/command_modules/azure-cli-vm/tests/recordings/test_vmss_create_none_options.yaml
+++ b/src/command_modules/azure-cli-vm/tests/recordings/test_vmss_create_none_options.yaml
@@ -6,62 +6,62 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4bdb5d0c-e75c-11e6-9bb5-64510658e3b3]
+ x-ms-client-request-id: [510486a1-1a26-11e7-a3e8-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options?api-version=2016-09-01
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options","name":"cli_test_vmss_create_none_options","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
+ body: {string: !!python/unicode '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options","name":"cli_test_vmss_create_none_options","location":"westus","tags":{"use":"az-test"},"properties":{"provisioningState":"Succeeded"}}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:23:47 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['256']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:35:52 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
headers:
Connection: [close]
Host: [raw.githubusercontent.com]
- User-Agent: [Python-urllib/3.5]
+ User-Agent: [Python-urllib/2.7]
method: GET
uri: https://raw.githubusercontent.com/Azure/azure-rest-api-specs/master/arm-compute/quickstart-templates/aliases.json
response:
- body: {string: "{\n \"$schema\":\"http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json\"\
+ body: {string: !!python/unicode "{\n \"$schema\":\"http://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json\"\
,\n \"contentVersion\":\"1.0.0.0\",\n \"parameters\":{},\n \"variables\"\
:{},\n \"resources\":[],\n\n \"outputs\":{\n \"aliases\":{\n \"\
- metadata\":{\n \"description\":\"This list of aliases is used by Azure\
- \ XPLAT CLI, Azure Powershell, and Azure Portal as shorthands for commonly\
- \ used VM images. If you change this file, please verify that this doesn't\
- \ break VMSS creation from Portal :).\"\n },\n \"type\":\"object\"\
- ,\n \"value\":{\n\n \"Linux\":{\n \"CentOS\":{\n \
- \ \"publisher\":\"OpenLogic\",\n \"offer\":\"CentOS\",\n\
- \ \"sku\":\"7.2\",\n \"version\":\"latest\"\n \
- \ },\n \"CoreOS\":{\n \"publisher\":\"CoreOS\",\n \
- \ \"offer\":\"CoreOS\",\n \"sku\":\"Stable\",\n \
- \ \"version\":\"latest\"\n },\n \"Debian\":{\n \
- \ \"publisher\":\"credativ\",\n \"offer\":\"Debian\",\n\
- \ \"sku\":\"8\",\n \"version\":\"latest\"\n \
- \ },\n \"openSUSE\":{\n \"publisher\":\"SUSE\",\n \
- \ \"offer\":\"openSUSE\",\n \"sku\":\"13.2\",\n \
- \ \"version\":\"latest\"\n },\n \"RHEL\":{\n \
- \ \"publisher\":\"RedHat\",\n \"offer\":\"RHEL\",\n \
- \ \"sku\":\"7.2\",\n \"version\":\"latest\"\n },\n\
- \ \"SLES\":{\n \"publisher\":\"SUSE\",\n \"\
- offer\":\"SLES\",\n \"sku\":\"12-SP1\",\n \"version\"\
- :\"latest\"\n },\n \"UbuntuLTS\":{\n \"publisher\"\
- :\"Canonical\",\n \"offer\":\"UbuntuServer\",\n \"sku\"\
- :\"14.04.4-LTS\",\n \"version\":\"latest\"\n }\n \
- \ },\n\n \"Windows\":{\n \"Win2012R2Datacenter\":{\n \
- \ \"publisher\":\"MicrosoftWindowsServer\",\n \"offer\"\
- :\"WindowsServer\",\n \"sku\":\"2012-R2-Datacenter\",\n \
- \ \"version\":\"latest\"\n },\n \"Win2012Datacenter\"\
+ type\":\"object\",\n \"value\":{\n\n \"Linux\":{\n \"\
+ CentOS\":{\n \"publisher\":\"OpenLogic\",\n \"offer\"\
+ :\"CentOS\",\n \"sku\":\"7.3\",\n \"version\":\"latest\"\
+ \n },\n \"CoreOS\":{\n \"publisher\":\"CoreOS\"\
+ ,\n \"offer\":\"CoreOS\",\n \"sku\":\"Stable\",\n \
+ \ \"version\":\"latest\"\n },\n \"Debian\":{\n\
+ \ \"publisher\":\"credativ\",\n \"offer\":\"Debian\"\
+ ,\n \"sku\":\"8\",\n \"version\":\"latest\"\n \
+ \ },\n \"openSUSE-Leap\": {\n \"publisher\":\"SUSE\"\
+ ,\n \"offer\":\"openSUSE-Leap\",\n \"sku\":\"42.2\"\
+ ,\n \"version\": \"latest\"\n },\n \"RHEL\":{\n\
+ \ \"publisher\":\"RedHat\",\n \"offer\":\"RHEL\",\n\
+ \ \"sku\":\"7.3\",\n \"version\":\"latest\"\n \
+ \ },\n \"SLES\":{\n \"publisher\":\"SUSE\",\n \
+ \ \"offer\":\"SLES\",\n \"sku\":\"12-SP2\",\n \
+ \ \"version\":\"latest\"\n },\n \"UbuntuLTS\":{\n \
+ \ \"publisher\":\"Canonical\",\n \"offer\":\"UbuntuServer\"\
+ ,\n \"sku\":\"16.04-LTS\",\n \"version\":\"latest\"\n\
+ \ }\n },\n\n \"Windows\":{\n \"Win2016Datacenter\"\
+ :{\n \"publisher\":\"MicrosoftWindowsServer\",\n \"\
+ offer\":\"WindowsServer\",\n \"sku\":\"2016-Datacenter\",\n \
+ \ \"version\":\"latest\"\n },\n \"Win2012R2Datacenter\"\
+ :{\n \"publisher\":\"MicrosoftWindowsServer\",\n \"\
+ offer\":\"WindowsServer\",\n \"sku\":\"2012-R2-Datacenter\",\n\
+ \ \"version\":\"latest\"\n },\n \"Win2012Datacenter\"\
:{\n \"publisher\":\"MicrosoftWindowsServer\",\n \"\
offer\":\"WindowsServer\",\n \"sku\":\"2012-Datacenter\",\n \
\ \"version\":\"latest\"\n },\n \"Win2008R2SP1\"\
@@ -70,30 +70,30 @@ interactions:
\ \"version\":\"latest\"\n }\n }\n }\n }\n }\n\
}\n"}
headers:
- Accept-Ranges: [bytes]
- Access-Control-Allow-Origin: ['*']
- Cache-Control: [max-age=300]
- Connection: [close]
- Content-Length: ['2297']
- Content-Security-Policy: [default-src 'none'; style-src 'unsafe-inline']
- Content-Type: [text/plain; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:23:48 GMT']
- ETag: ['"db78eb36618a060181b32ac2de91b1733f382e01"']
- Expires: ['Tue, 31 Jan 2017 02:28:48 GMT']
- Source-Age: ['92']
- Strict-Transport-Security: [max-age=31536000]
- Vary: ['Authorization,Accept-Encoding']
- Via: [1.1 varnish]
- X-Cache: [HIT]
- X-Cache-Hits: ['4']
- X-Content-Type-Options: [nosniff]
- X-Fastly-Request-ID: [e3d643fc88bf8216e8c0b03c54d6372bbae38ffe]
- X-Frame-Options: [deny]
- X-Geo-Block-List: ['']
- X-GitHub-Request-Id: ['D59A:5F89:1173F0A:1201B89:588FF4D5']
- X-Served-By: [cache-den6020-DEN]
- X-Timer: ['S1485829428.113981,VS0,VE0']
- X-XSS-Protection: [1; mode=block]
+ accept-ranges: [bytes]
+ access-control-allow-origin: ['*']
+ cache-control: [max-age=300]
+ connection: [close]
+ content-length: ['2235']
+ content-security-policy: [default-src 'none'; style-src 'unsafe-inline']
+ content-type: [text/plain; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:35:54 GMT']
+ etag: ['"d6824855d13e27c5258a680eb60f635d088fd05e"']
+ expires: ['Wed, 05 Apr 2017 17:40:54 GMT']
+ source-age: ['0']
+ strict-transport-security: [max-age=31536000]
+ vary: ['Authorization,Accept-Encoding']
+ via: [1.1 varnish]
+ x-cache: [MISS]
+ x-cache-hits: ['0']
+ x-content-type-options: [nosniff]
+ x-fastly-request-id: [629fd3a682815aa9222dcda70eedfd53926468a4]
+ x-frame-options: [deny]
+ x-geo-block-list: ['']
+ x-github-request-id: ['21DA:04F8:2EAC905:3080E89:58E52AFA']
+ x-served-by: [cache-sea1021-SEA]
+ x-timer: ['S1491413754.234857,VS0,VE414']
+ x-xss-protection: [1; mode=block]
status: {code: 200, message: OK}
- request:
body: null
@@ -102,78 +102,73 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- networkmanagementclient/0.30.0 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c1c8576-e75c-11e6-b23d-64510658e3b3]
+ x-ms-client-request-id: [5203a71e-1a26-11e7-8815-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks?api-version=2016-09-01
response:
- body: {string: '{"value":[]}'}
+ body: {string: !!python/unicode '{"value":[]}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:23:48 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['12']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:35:53 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
- body: '{"properties": {"template": {"contentVersion": "1.0.0.0", "resources":
- [{"dependsOn": [], "properties": {"addressSpace": {"addressPrefixes": ["10.0.0.0/16"]},
- "subnets": [{"properties": {"addressPrefix": "10.0.0.0/24"}, "name": "vmss1Subnet"}]},
- "type": "Microsoft.Network/virtualNetworks", "apiVersion": "2015-06-15", "tags":
- {}, "name": "vmss1VNET", "location": "westus"}, {"dependsOn": ["Microsoft.Network/virtualNetworks/vmss1VNET"],
- "properties": {"upgradePolicy": {"mode": "Manual"}, "singlePlacementGroup":
- true, "virtualMachineProfile": {"networkProfile": {"networkInterfaceConfigurations":
- [{"properties": {"primary": "true", "ipConfigurations": [{"properties": {"subnet":
- {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet"}},
- "name": "vmss11i6rIPConfig"}]}, "name": "vmss11i6rNic"}]}, "storageProfile":
- {"imageReference": {"offer": "Debian", "sku": "8", "publisher": "credativ",
- "version": "latest"}, "osDisk": {"caching": "ReadOnly", "managedDisk": {"storageAccountType":
- "Standard_LRS"}, "createOption": "FromImage"}}, "osProfile": {"adminUsername":
- "yugangw", "computerNamePrefix": "vmss11i6r", "linuxConfiguration": {"disablePasswordAuthentication":
- true, "ssh": {"publicKeys": [{"path": "/home/yugangw/.ssh/authorized_keys",
- "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==
- [email protected]\n"}]}}}}, "overprovision": true}, "type": "Microsoft.Compute/virtualMachineScaleSets",
- "apiVersion": "2016-04-30-preview", "tags": {}, "name": "vmss1", "sku": {"name":
- "Standard_D1_v2", "tier": "Standard", "capacity": 2}, "location": "westus"}],
- "variables": {"storageAccountNames": ["vmss11i6r0", "vmss11i6r1", "vmss11i6r2",
- "vmss11i6r3", "vmss11i6r4"], "vhdContainers": ["[concat(''https://'', variables(''storageAccountNames'')[0],
- ''.blob.core.windows.net/vhds'')]", "[concat(''https://'', variables(''storageAccountNames'')[1],
- ''.blob.core.windows.net/vhds'')]", "[concat(''https://'', variables(''storageAccountNames'')[2],
- ''.blob.core.windows.net/vhds'')]", "[concat(''https://'', variables(''storageAccountNames'')[3],
- ''.blob.core.windows.net/vhds'')]", "[concat(''https://'', variables(''storageAccountNames'')[4],
- ''.blob.core.windows.net/vhds'')]"]}, "outputs": {"VMSS": {"value": "[reference(resourceId(''Microsoft.Compute/virtualMachineScaleSets'',
- ''vmss1''),providers(''Microsoft.Compute'', ''virtualMachineScaleSets'').apiVersions[0])]",
- "type": "object"}}, "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
- "parameters": {}}, "parameters": {}, "mode": "Incremental"}}'
+ body: !!python/unicode '{"properties": {"mode": "Incremental", "parameters": {},
+ "template": {"parameters": {}, "outputs": {"VMSS": {"type": "object", "value":
+ "[reference(resourceId(''Microsoft.Compute/virtualMachineScaleSets'', ''vmss1''),providers(''Microsoft.Compute'',
+ ''virtualMachineScaleSets'').apiVersions[0])]"}}, "variables": {}, "contentVersion":
+ "1.0.0.0", "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
+ "resources": [{"name": "vmss1VNET", "tags": {}, "apiVersion": "2015-06-15",
+ "location": "westus", "dependsOn": [], "type": "Microsoft.Network/virtualNetworks",
+ "properties": {"subnets": [{"name": "vmss1Subnet", "properties": {"addressPrefix":
+ "10.0.0.0/24"}}], "addressSpace": {"addressPrefixes": ["10.0.0.0/16"]}}}, {"sku":
+ {"tier": "Standard", "capacity": 2, "name": "Standard_D1_v2"}, "name": "vmss1",
+ "tags": {}, "apiVersion": "2016-04-30-preview", "location": "westus", "dependsOn":
+ ["Microsoft.Network/virtualNetworks/vmss1VNET"], "type": "Microsoft.Compute/virtualMachineScaleSets",
+ "properties": {"singlePlacementGroup": true, "virtualMachineProfile": {"storageProfile":
+ {"imageReference": {"sku": "8", "publisher": "credativ", "version": "latest",
+ "offer": "Debian"}, "osDisk": {"caching": "ReadWrite", "managedDisk": {"storageAccountType":
+ "Standard_LRS"}, "createOption": "FromImage"}}, "osProfile": {"computerNamePrefix":
+ "vmss1247x", "adminUsername": "ubuntu", "linuxConfiguration": {"ssh": {"publicKeys":
+ [{"path": "/home/ubuntu/.ssh/authorized_keys", "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==
+ [email protected]\n"}]}, "disablePasswordAuthentication": true}}, "networkProfile":
+ {"networkInterfaceConfigurations": [{"name": "vmss1247xNic", "properties": {"primary":
+ "true", "ipConfigurations": [{"name": "vmss1247xIPConfig", "properties": {"subnet":
+ {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet"}}}]}}]}},
+ "overprovision": true, "upgradePolicy": {"mode": "Manual"}}}]}}}'
headers:
Accept: [application/json]
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
- Content-Length: ['3326']
+ Content-Length: ['2753']
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
method: PUT
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/vmss_deploy_N5L40IU9kvBddIGfYy2Gb51rQeR9Fol0","name":"vmss_deploy_N5L40IU9kvBddIGfYy2Gb51rQeR9Fol0","properties":{"parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-01-31T02:23:49.4134276Z","duration":"PT0.612975S","correlationId":"fa45a063-c769-4a35-9c91-7c86415baa2e","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vmss1VNET"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"vmss1"}]}}'}
+ body: {string: !!python/unicode '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/vmss_deploy_oexSGEOf0DQtjm9VsFUp9ahA99kjuRkp","name":"vmss_deploy_oexSGEOf0DQtjm9VsFUp9ahA99kjuRkp","properties":{"templateHash":"15286152982969927295","parameters":{},"mode":"Incremental","provisioningState":"Accepted","timestamp":"2017-04-05T17:35:55.8908997Z","duration":"PT0.5316267S","correlationId":"a0d7e709-a029-4ad7-9523-a2975d4f2751","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vmss1VNET"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"vmss1"}]}}'}
headers:
- Azure-AsyncOperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/vmss_deploy_N5L40IU9kvBddIGfYy2Gb51rQeR9Fol0/operationStatuses/08587157774566771962?api-version=2016-09-01']
- Cache-Control: [no-cache]
- Content-Length: ['1239']
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:23:49 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- x-ms-ratelimit-remaining-subscription-writes: ['1198']
+ azure-asyncoperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/vmss_deploy_oexSGEOf0DQtjm9VsFUp9ahA99kjuRkp/operationStatuses/08587101931301183462?api-version=2016-09-01']
+ cache-control: [no-cache]
+ content-length: ['1278']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:35:56 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ x-ms-ratelimit-remaining-subscription-writes: ['1197']
status: {code: 201, message: Created}
- request:
body: null
@@ -182,23 +177,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587157774566771962?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587101931301183462?api-version=2016-09-01
response:
- body: {string: '{"status":"Running"}'}
+ body: {string: !!python/unicode '{"status":"Running"}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:24:19 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['20']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:36:26 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -207,23 +203,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587157774566771962?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587101931301183462?api-version=2016-09-01
response:
- body: {string: '{"status":"Running"}'}
+ body: {string: !!python/unicode '{"status":"Running"}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:24:50 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['20']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:36:55 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -232,23 +229,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587157774566771962?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587101931301183462?api-version=2016-09-01
response:
- body: {string: '{"status":"Running"}'}
+ body: {string: !!python/unicode '{"status":"Running"}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:25:20 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['20']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:37:26 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -257,23 +255,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587157774566771962?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587101931301183462?api-version=2016-09-01
response:
- body: {string: '{"status":"Running"}'}
+ body: {string: !!python/unicode '{"status":"Running"}'}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:25:49 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
+ cache-control: [no-cache]
content-length: ['20']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:37:56 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -282,23 +281,52 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587157774566771962?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587101931301183462?api-version=2016-09-01
response:
- body: {string: '{"status":"Running"}'}
+ body: {string: !!python/unicode '{"status":"Succeeded"}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['22']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:38:26 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
headers:
- Cache-Control: [no-cache]
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:26:20 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['20']
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 resourcemanagementclient/0.30.2 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [521dbecf-1a26-11e7-9c8c-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ response:
+ body: {string: !!python/unicode '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/vmss_deploy_oexSGEOf0DQtjm9VsFUp9ahA99kjuRkp","name":"vmss_deploy_oexSGEOf0DQtjm9VsFUp9ahA99kjuRkp","properties":{"templateHash":"15286152982969927295","parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-04-05T17:38:07.7332452Z","duration":"PT2M12.3739722S","correlationId":"a0d7e709-a029-4ad7-9523-a2975d4f2751","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vmss1VNET"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"vmss1"}],"outputs":{"vmss":{"type":"Object","value":{"singlePlacementGroup":true,"upgradePolicy":{"mode":"Manual"},"virtualMachineProfile":{"osProfile":{"computerNamePrefix":"vmss1247x","adminUsername":"ubuntu","linuxConfiguration":{"disablePasswordAuthentication":true,"ssh":{"publicKeys":[{"path":"/home/ubuntu/.ssh/authorized_keys","keyData":"ssh-rsa
+ AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==
+ [email protected]\n"}]}},"secrets":[]},"storageProfile":{"osDisk":{"createOption":"FromImage","caching":"ReadWrite","managedDisk":{"storageAccountType":"Standard_LRS"}},"imageReference":{"publisher":"credativ","offer":"Debian","sku":"8","version":"latest"}},"networkProfile":{"networkInterfaceConfigurations":[{"name":"vmss1247xNic","properties":{"primary":true,"ipConfigurations":[{"name":"vmss1247xIPConfig","properties":{"subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet"}}}]}}]}},"provisioningState":"Succeeded","overprovision":true,"uniqueId":"26fcde72-e107-4262-8d44-9a168484a948"}}},"outputResources":[{"id":"Microsoft.Compute/virtualMachineScaleSets/vmss1"},{"id":"Microsoft.Network/virtualNetworks/vmss1VNET"}]}}'}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['3210']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:38:27 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -307,23 +335,173 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [ae4e0e80-1a26-11e7-8908-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment/operationStatuses/08587157774566771962?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1?api-version=2016-04-30-preview
response:
- body: {string: '{"status":"Succeeded"}'}
+ body: {string: !!python/unicode "{\r\n \"sku\": {\r\n \"name\": \"Standard_D1_v2\"\
+ ,\r\n \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\"\
+ : {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n\
+ \ \"mode\": \"Manual\"\r\n },\r\n \"virtualMachineProfile\": {\r\
+ \n \"osProfile\": {\r\n \"computerNamePrefix\": \"vmss1247x\"\
+ ,\r\n \"adminUsername\": \"ubuntu\",\r\n \"linuxConfiguration\"\
+ : {\r\n \"disablePasswordAuthentication\": true,\r\n \"\
+ ssh\": {\r\n \"publicKeys\": [\r\n {\r\n \
+ \ \"path\": \"/home/ubuntu/.ssh/authorized_keys\",\r\n \
+ \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\
+ \ [email protected]\\n\"\r\n }\r\n ]\r\n }\r\
+ \n },\r\n \"secrets\": []\r\n },\r\n \"storageProfile\"\
+ : {\r\n \"osDisk\": {\r\n \"createOption\": \"FromImage\"\
+ ,\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\
+ \n \"storageAccountType\": \"Standard_LRS\"\r\n }\r\n\
+ \ },\r\n \"imageReference\": {\r\n \"publisher\": \"\
+ credativ\",\r\n \"offer\": \"Debian\",\r\n \"sku\": \"8\"\
+ ,\r\n \"version\": \"latest\"\r\n }\r\n },\r\n \"\
+ networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"vmss1247xNic\"\
+ ,\"properties\":{\"primary\":true,\"ipConfigurations\":[{\"name\":\"vmss1247xIPConfig\"\
+ ,\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet\"\
+ }}}]}}]}\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"\
+ overprovision\": true,\r\n \"uniqueId\": \"26fcde72-e107-4262-8d44-9a168484a948\"\
+ \r\n },\r\n \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n\
+ \ \"location\": \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1\"\
+ ,\r\n \"name\": \"vmss1\"\r\n}"}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['2621']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:38:29 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
headers:
- Cache-Control: [no-cache]
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:26:50 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['22']
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [af4f2acf-1a26-11e7-8c8b-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1?api-version=2016-04-30-preview
+ response:
+ body: {string: !!python/unicode "{\r\n \"sku\": {\r\n \"name\": \"Standard_D1_v2\"\
+ ,\r\n \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\"\
+ : {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n\
+ \ \"mode\": \"Manual\"\r\n },\r\n \"virtualMachineProfile\": {\r\
+ \n \"osProfile\": {\r\n \"computerNamePrefix\": \"vmss1247x\"\
+ ,\r\n \"adminUsername\": \"ubuntu\",\r\n \"linuxConfiguration\"\
+ : {\r\n \"disablePasswordAuthentication\": true,\r\n \"\
+ ssh\": {\r\n \"publicKeys\": [\r\n {\r\n \
+ \ \"path\": \"/home/ubuntu/.ssh/authorized_keys\",\r\n \
+ \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\
+ \ [email protected]\\n\"\r\n }\r\n ]\r\n }\r\
+ \n },\r\n \"secrets\": []\r\n },\r\n \"storageProfile\"\
+ : {\r\n \"osDisk\": {\r\n \"createOption\": \"FromImage\"\
+ ,\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\
+ \n \"storageAccountType\": \"Standard_LRS\"\r\n }\r\n\
+ \ },\r\n \"imageReference\": {\r\n \"publisher\": \"\
+ credativ\",\r\n \"offer\": \"Debian\",\r\n \"sku\": \"8\"\
+ ,\r\n \"version\": \"latest\"\r\n }\r\n },\r\n \"\
+ networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"vmss1247xNic\"\
+ ,\"properties\":{\"primary\":true,\"ipConfigurations\":[{\"name\":\"vmss1247xIPConfig\"\
+ ,\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet\"\
+ }}}]}}]}\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"\
+ overprovision\": true,\r\n \"uniqueId\": \"26fcde72-e107-4262-8d44-9a168484a948\"\
+ \r\n },\r\n \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n\
+ \ \"location\": \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1\"\
+ ,\r\n \"name\": \"vmss1\"\r\n}"}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['2621']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:38:30 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: !!python/unicode '{"sku": {"tier": "Standard", "capacity": 2, "name": "Standard_D1_v2"},
+ "location": "westus", "properties": {"singlePlacementGroup": true, "overprovision":
+ true, "virtualMachineProfile": {"storageProfile": {"imageReference": {"sku":
+ "8", "publisher": "credativ", "version": "latest", "offer": "Debian"}, "osDisk":
+ {"caching": "ReadWrite", "managedDisk": {"storageAccountType": "Standard_LRS"},
+ "createOption": "fromImage"}}, "osProfile": {"secrets": [], "adminUsername":
+ "ubuntu", "computerNamePrefix": "vmss1247x", "linuxConfiguration": {"ssh": {"publicKeys":
+ [{"path": "/home/ubuntu/.ssh/authorized_keys", "keyData": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==
+ [email protected]\n"}]}, "disablePasswordAuthentication": true}}, "networkProfile":
+ {"networkInterfaceConfigurations": [{"properties": {"primary": true, "ipConfigurations":
+ [{"properties": {"subnet": {"id": "/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet"}},
+ "name": "vmss1247xIPConfig"}]}, "name": "vmss1247xNic"}]}}, "upgradePolicy":
+ {"mode": "Manual"}}, "tags": {"test": "success"}}'
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Length: ['1862']
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [af81374f-1a26-11e7-b7f5-a0b3ccf7272a]
+ method: PUT
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1?api-version=2016-04-30-preview
+ response:
+ body: {string: !!python/unicode "{\r\n \"sku\": {\r\n \"name\": \"Standard_D1_v2\"\
+ ,\r\n \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\"\
+ : {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n\
+ \ \"mode\": \"Manual\"\r\n },\r\n \"virtualMachineProfile\": {\r\
+ \n \"osProfile\": {\r\n \"computerNamePrefix\": \"vmss1247x\"\
+ ,\r\n \"adminUsername\": \"ubuntu\",\r\n \"linuxConfiguration\"\
+ : {\r\n \"disablePasswordAuthentication\": true,\r\n \"\
+ ssh\": {\r\n \"publicKeys\": [\r\n {\r\n \
+ \ \"path\": \"/home/ubuntu/.ssh/authorized_keys\",\r\n \
+ \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\
+ \ [email protected]\\n\"\r\n }\r\n ]\r\n }\r\
+ \n },\r\n \"secrets\": []\r\n },\r\n \"storageProfile\"\
+ : {\r\n \"osDisk\": {\r\n \"createOption\": \"FromImage\"\
+ ,\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\
+ \n \"storageAccountType\": \"Standard_LRS\"\r\n }\r\n\
+ \ },\r\n \"imageReference\": {\r\n \"publisher\": \"\
+ credativ\",\r\n \"offer\": \"Debian\",\r\n \"sku\": \"8\"\
+ ,\r\n \"version\": \"latest\"\r\n }\r\n },\r\n \"\
+ networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"vmss1247xNic\"\
+ ,\"properties\":{\"primary\":true,\"ipConfigurations\":[{\"name\":\"vmss1247xIPConfig\"\
+ ,\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet\"\
+ }}}]}}]}\r\n },\r\n \"provisioningState\": \"Updating\",\r\n \"overprovision\"\
+ : true,\r\n \"uniqueId\": \"26fcde72-e107-4262-8d44-9a168484a948\"\r\n\
+ \ },\r\n \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n \"\
+ location\": \"westus\",\r\n \"tags\": {\r\n \"test\": \"success\"\r\n\
+ \ },\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1\"\
+ ,\r\n \"name\": \"vmss1\"\r\n}"}
+ headers:
+ azure-asyncoperation: ['https://management.azure.com/subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/providers/Microsoft.Compute/locations/westus/operations/b215c3c1-c984-4c12-84d7-8c01b94d6295?api-version=2016-04-30-preview']
+ cache-control: [no-cache]
+ content-length: ['2647']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:38:32 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ x-ms-ratelimit-remaining-subscription-writes: ['1198']
status: {code: 200, message: OK}
- request:
body: null
@@ -332,25 +510,58 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- resourcemanagementclient/0.30.2 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [4c40fd54-e75c-11e6-b444-64510658e3b3]
+ x-ms-client-request-id: [af81374f-1a26-11e7-b7f5-a0b3ccf7272a]
method: GET
- uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/mock-deployment?api-version=2016-09-01
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/operations/b215c3c1-c984-4c12-84d7-8c01b94d6295?api-version=2016-04-30-preview
response:
- body: {string: '{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Resources/deployments/vmss_deploy_N5L40IU9kvBddIGfYy2Gb51rQeR9Fol0","name":"vmss_deploy_N5L40IU9kvBddIGfYy2Gb51rQeR9Fol0","properties":{"parameters":{},"mode":"Incremental","provisioningState":"Succeeded","timestamp":"2017-01-31T02:26:33.932548Z","duration":"PT2M45.1320954S","correlationId":"fa45a063-c769-4a35-9c91-7c86415baa2e","providers":[{"namespace":"Microsoft.Network","resourceTypes":[{"resourceType":"virtualNetworks","locations":["westus"]}]},{"namespace":"Microsoft.Compute","resourceTypes":[{"resourceType":"virtualMachineScaleSets","locations":["westus"]}]}],"dependencies":[{"dependsOn":[{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET","resourceType":"Microsoft.Network/virtualNetworks","resourceName":"vmss1VNET"}],"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1","resourceType":"Microsoft.Compute/virtualMachineScaleSets","resourceName":"vmss1"}],"outputs":{"vmss":{"type":"Object","value":{"singlePlacementGroup":true,"upgradePolicy":{"mode":"Manual"},"virtualMachineProfile":{"osProfile":{"computerNamePrefix":"vmss11i6r","adminUsername":"yugangw","linuxConfiguration":{"disablePasswordAuthentication":true,"ssh":{"publicKeys":[{"path":"/home/yugangw/.ssh/authorized_keys","keyData":"ssh-rsa
- AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==
- [email protected]\n"}]}},"secrets":[]},"storageProfile":{"osDisk":{"createOption":"FromImage","caching":"ReadOnly","managedDisk":{"storageAccountType":"Standard_LRS"}},"imageReference":{"publisher":"credativ","offer":"Debian","sku":"8","version":"latest"}},"networkProfile":{"networkInterfaceConfigurations":[{"name":"vmss11i6rNic","properties":{"primary":true,"ipConfigurations":[{"name":"vmss11i6rIPConfig","properties":{"subnet":{"id":"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet"}}}]}}]}},"provisioningState":"Succeeded","overprovision":true,"uniqueId":"c7d80043-b836-4fd6-bd44-09396159b26d"}}},"outputResources":[{"id":"Microsoft.Compute/virtualMachineScaleSets/vmss1"},{"id":"Microsoft.Network/virtualNetworks/vmss1VNET"}]}}'}
+ body: {string: !!python/unicode "{\r\n \"startTime\": \"2017-04-05T17:38:32.9148711+00:00\"\
+ ,\r\n \"status\": \"InProgress\",\r\n \"name\": \"b215c3c1-c984-4c12-84d7-8c01b94d6295\"\
+ \r\n}"}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['134']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:39:03 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
headers:
- Cache-Control: [no-cache]
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:26:50 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Vary: [Accept-Encoding]
- content-length: ['3172']
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [af81374f-1a26-11e7-b7f5-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/operations/b215c3c1-c984-4c12-84d7-8c01b94d6295?api-version=2016-04-30-preview
+ response:
+ body: {string: !!python/unicode "{\r\n \"startTime\": \"2017-04-05T17:38:32.9148711+00:00\"\
+ ,\r\n \"status\": \"InProgress\",\r\n \"name\": \"b215c3c1-c984-4c12-84d7-8c01b94d6295\"\
+ \r\n}"}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['134']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:39:33 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -359,50 +570,83 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- computemanagementclient/0.33.0 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [b99a8a90-e75c-11e6-80eb-64510658e3b3]
+ x-ms-client-request-id: [af81374f-1a26-11e7-b7f5-a0b3ccf7272a]
+ method: GET
+ uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Compute/locations/westus/operations/b215c3c1-c984-4c12-84d7-8c01b94d6295?api-version=2016-04-30-preview
+ response:
+ body: {string: !!python/unicode "{\r\n \"startTime\": \"2017-04-05T17:38:32.9148711+00:00\"\
+ ,\r\n \"endTime\": \"2017-04-05T17:39:52.3494835+00:00\",\r\n \"status\"\
+ : \"Succeeded\",\r\n \"name\": \"b215c3c1-c984-4c12-84d7-8c01b94d6295\"\r\
+ \n}"}
+ headers:
+ cache-control: [no-cache]
+ content-length: ['184']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:40:03 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
+ status: {code: 200, message: OK}
+- request:
+ body: null
+ headers:
+ Accept: [application/json]
+ Accept-Encoding: ['gzip, deflate']
+ Connection: [keep-alive]
+ Content-Type: [application/json; charset=utf-8]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 computemanagementclient/0.33.1rc1 Azure-SDK-For-Python
+ AZURECLI/TEST/2.0.2+dev]
+ accept-language: [en-US]
+ x-ms-client-request-id: [af81374f-1a26-11e7-b7f5-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1?api-version=2016-04-30-preview
response:
- body: {string: "{\r\n \"sku\": {\r\n \"name\": \"Standard_D1_v2\",\r\n \
- \ \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\"\
+ body: {string: !!python/unicode "{\r\n \"sku\": {\r\n \"name\": \"Standard_D1_v2\"\
+ ,\r\n \"tier\": \"Standard\",\r\n \"capacity\": 2\r\n },\r\n \"properties\"\
: {\r\n \"singlePlacementGroup\": true,\r\n \"upgradePolicy\": {\r\n\
\ \"mode\": \"Manual\"\r\n },\r\n \"virtualMachineProfile\": {\r\
- \n \"osProfile\": {\r\n \"computerNamePrefix\": \"vmss11i6r\"\
- ,\r\n \"adminUsername\": \"yugangw\",\r\n \"linuxConfiguration\"\
+ \n \"osProfile\": {\r\n \"computerNamePrefix\": \"vmss1247x\"\
+ ,\r\n \"adminUsername\": \"ubuntu\",\r\n \"linuxConfiguration\"\
: {\r\n \"disablePasswordAuthentication\": true,\r\n \"\
ssh\": {\r\n \"publicKeys\": [\r\n {\r\n \
- \ \"path\": \"/home/yugangw/.ssh/authorized_keys\",\r\n \
- \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\
+ \ \"path\": \"/home/ubuntu/.ssh/authorized_keys\",\r\n \
+ \ \"keyData\": \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCbIg1guRHbI0lV11wWDt1r2cUdcNd27CJsg+SfgC7miZeubtwUhbsPdhMQsfDyhOWHq1+ZL0M+nJZV63d/1dhmhtgyOqejUwrPlzKhydsbrsdUor+JmNJDdW01v7BXHyuymT8G4s09jCasNOwiufbP/qp72ruu0bIA1nySsvlf9pCQAuFkAnVnf/rFhUlOkhtRpwcq8SUNY2zRHR/EKb/4NWY1JzR4sa3q2fWIJdrrX0DvLoa5g9bIEd4Df79ba7v+yiUBOS0zT2ll+z4g9izHK3EO5d8hL4jYxcjKs+wcslSYRWrascfscLgMlMGh0CdKeNTDjHpGPncaf3Z+FwwwjWeuiNBxv7bJo13/8B/098KlVDl4GZqsoBCEjPyJfV6hO0y/LkRGkk7oHWKgeWAfKtfLItRp00eZ4fcJNK9kCaSMmEugoZWcI7NGbZXzqFWqbpRI7NcDP9+WIQ+i9U5vqWsqd/zng4kbuAJ6UuKqIzB0upYrLShfQE3SAck8oaLhJqqq56VfDuASNpJKidV+zq27HfSBmbXnkR/5AK337dc3MXKJypoK/QPMLKUAP5XLPbs+NddJQV7EZXd29DLgp+fRIg3edpKdO7ZErWhv7d+3Kws+e1Y+ypmR2WIVSwVyBEUfgv2C8Ts9gnTF4pNcEY/S2aBicz5Ew2+jdyGNQQ==\
\ [email protected]\\n\"\r\n }\r\n ]\r\n }\r\
\n },\r\n \"secrets\": []\r\n },\r\n \"storageProfile\"\
: {\r\n \"osDisk\": {\r\n \"createOption\": \"FromImage\"\
- ,\r\n \"caching\": \"ReadOnly\",\r\n \"managedDisk\": {\r\
+ ,\r\n \"caching\": \"ReadWrite\",\r\n \"managedDisk\": {\r\
\n \"storageAccountType\": \"Standard_LRS\"\r\n }\r\n\
\ },\r\n \"imageReference\": {\r\n \"publisher\": \"\
credativ\",\r\n \"offer\": \"Debian\",\r\n \"sku\": \"8\"\
,\r\n \"version\": \"latest\"\r\n }\r\n },\r\n \"\
- networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"vmss11i6rNic\"\
- ,\"properties\":{\"primary\":true,\"ipConfigurations\":[{\"name\":\"vmss11i6rIPConfig\"\
+ networkProfile\": {\"networkInterfaceConfigurations\":[{\"name\":\"vmss1247xNic\"\
+ ,\"properties\":{\"primary\":true,\"ipConfigurations\":[{\"name\":\"vmss1247xIPConfig\"\
,\"properties\":{\"subnet\":{\"id\":\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/virtualNetworks/vmss1VNET/subnets/vmss1Subnet\"\
}}}]}}]}\r\n },\r\n \"provisioningState\": \"Succeeded\",\r\n \"\
- overprovision\": true,\r\n \"uniqueId\": \"c7d80043-b836-4fd6-bd44-09396159b26d\"\
+ overprovision\": true,\r\n \"uniqueId\": \"26fcde72-e107-4262-8d44-9a168484a948\"\
\r\n },\r\n \"type\": \"Microsoft.Compute/virtualMachineScaleSets\",\r\n\
- \ \"location\": \"westus\",\r\n \"tags\": {},\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1\"\
+ \ \"location\": \"westus\",\r\n \"tags\": {\r\n \"test\": \"success\"\
+ \r\n },\r\n \"id\": \"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Compute/virtualMachineScaleSets/vmss1\"\
,\r\n \"name\": \"vmss1\"\r\n}"}
headers:
- Cache-Control: [no-cache]
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:26:51 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
- Transfer-Encoding: [chunked]
- Vary: [Accept-Encoding]
- content-length: ['2622']
+ cache-control: [no-cache]
+ content-length: ['2648']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:40:03 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ server: [Microsoft-HTTPAPI/2.0, Microsoft-HTTPAPI/2.0]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
+ transfer-encoding: [chunked]
+ vary: [Accept-Encoding]
status: {code: 200, message: OK}
- request:
body: null
@@ -411,23 +655,24 @@ interactions:
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Type: [application/json; charset=utf-8]
- User-Agent: [python/3.5.0 (Windows-10.0.14393) requests/2.9.1 msrest/0.4.4 msrest_azure/0.4.7
- networkmanagementclient/0.30.0 Azure-SDK-For-Python AZURECLI/TEST/0.1.1b2+dev]
+ User-Agent: [python/2.7.11 (Windows-10-10.0.14393) requests/2.9.1 msrest/0.4.6
+ msrest_azure/0.4.7 networkmanagementclient/0.30.1 Azure-SDK-For-Python AZURECLI/TEST/2.0.2+dev]
accept-language: [en-US]
- x-ms-client-request-id: [b9ee28a6-e75c-11e6-a86c-64510658e3b3]
+ x-ms-client-request-id: [e74556cf-1a26-11e7-b110-a0b3ccf7272a]
method: GET
uri: https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/cli_test_vmss_create_none_options/providers/Microsoft.Network/publicIPAddresses/vmss1PublicIP?api-version=2016-09-01
response:
- body: {string: '{"error":{"code":"ResourceNotFound","message":"The Resource ''Microsoft.Network/publicIPAddresses/vmss1PublicIP''
- under resource group ''cli_test_vmss_create_none_options'' was not found."}}'}
+ body: {string: !!python/unicode '{"error":{"code":"ResourceNotFound","message":"The
+ Resource ''Microsoft.Network/publicIPAddresses/vmss1PublicIP'' under resource
+ group ''cli_test_vmss_create_none_options'' was not found."}}'}
headers:
- Cache-Control: [no-cache]
- Content-Length: ['192']
- Content-Type: [application/json; charset=utf-8]
- Date: ['Tue, 31 Jan 2017 02:26:51 GMT']
- Expires: ['-1']
- Pragma: [no-cache]
- Strict-Transport-Security: [max-age=31536000; includeSubDomains]
+ cache-control: [no-cache]
+ content-length: ['192']
+ content-type: [application/json; charset=utf-8]
+ date: ['Wed, 05 Apr 2017 17:40:04 GMT']
+ expires: ['-1']
+ pragma: [no-cache]
+ strict-transport-security: [max-age=31536000; includeSubDomains]
x-ms-failure-cause: [gateway]
status: {code: 404, message: Not Found}
version: 1
diff --git a/src/command_modules/azure-cli-vm/tests/test_vm_commands.py b/src/command_modules/azure-cli-vm/tests/test_vm_commands.py
index e262101db..ac8dd0720 100644
--- a/src/command_modules/azure-cli-vm/tests/test_vm_commands.py
+++ b/src/command_modules/azure-cli-vm/tests/test_vm_commands.py
@@ -512,7 +512,7 @@ class VMNoWaitScenarioTest(ResourceGroupVCRTestBase):
class VMAvailSetScenarioTest(ResourceGroupVCRTestBase):
def __init__(self, test_method):
- super(VMAvailSetScenarioTest, self).__init__(__file__, test_method, resource_group='cliTestRg_Availset')
+ super(VMAvailSetScenarioTest, self).__init__(__file__, test_method, resource_group='cli_test_avail_set')
self.location = 'westus'
self.name = 'availset-test'
@@ -527,6 +527,8 @@ class VMAvailSetScenarioTest(ResourceGroupVCRTestBase):
JMESPathCheck('platformUpdateDomainCount', 2),
JMESPathCheck('sku.managed', True)
])
+ self.cmd('vm availability-set update -g {} -n {} --set tags.test=success'.format(self.resource_group, self.name),
+ checks=JMESPathCheck('tags.test', 'success'))
self.cmd('vm availability-set list -g {}'.format(self.resource_group), checks=[
JMESPathCheck('length(@)', 1),
JMESPathCheck('[0].name', self.name),
@@ -1488,12 +1490,13 @@ class VMSSCreateNoneOptionsTest(ResourceGroupVCRTestBase): # pylint: disable=to
self.cmd('vmss create -n {0} -g {1} --image Debian --load-balancer {3} --admin-username ubuntu'
' --ssh-key-value \'{2}\' --public-ip-address {3} --tags {3}'
.format(vmss_name, self.resource_group, TEST_SSH_KEY_PUB, '""' if platform.system() == 'Windows' else "''"))
-
self.cmd('vmss show -n {} -g {}'.format(vmss_name, self.resource_group), [
JMESPathCheck('availabilitySet', None),
JMESPathCheck('tags', {}),
JMESPathCheck('virtualMachineProfile.networkProfile.networkInterfaceConfigurations.ipConfigurations.loadBalancerBackendAddressPools', None)
])
+ self.cmd('vmss update -g {} -n {} --set tags.test=success'.format(self.resource_group, vmss_name),
+ checks=JMESPathCheck('tags.test', 'success'))
self.cmd('network public-ip show -n {}PublicIP -g {}'.format(vmss_name, self.resource_group), checks=NoneCheck(), allowed_exceptions='was not found')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 7
} | 2.0 | {
"env_vars": null,
"env_yml_path": null,
"install": "python scripts/dev_setup.py",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | adal==0.4.3
applicationinsights==0.10.0
argcomplete==1.8.0
astroid==1.4.9
attrs==22.2.0
autopep8==1.2.4
azure-batch==2.0.0
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli&subdirectory=src/azure-cli
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_acr&subdirectory=src/command_modules/azure-cli-acr
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_acs&subdirectory=src/command_modules/azure-cli-acs
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_appservice&subdirectory=src/command_modules/azure-cli-appservice
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_batch&subdirectory=src/command_modules/azure-cli-batch
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_cloud&subdirectory=src/command_modules/azure-cli-cloud
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_component&subdirectory=src/command_modules/azure-cli-component
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_configure&subdirectory=src/command_modules/azure-cli-configure
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_container&subdirectory=src/command_modules/azure-cli-container
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_core&subdirectory=src/azure-cli-core
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_dla&subdirectory=src/command_modules/azure-cli-dla
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_dls&subdirectory=src/command_modules/azure-cli-dls
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_documentdb&subdirectory=src/command_modules/azure-cli-documentdb
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_feedback&subdirectory=src/command_modules/azure-cli-feedback
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_find&subdirectory=src/command_modules/azure-cli-find
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_iot&subdirectory=src/command_modules/azure-cli-iot
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_keyvault&subdirectory=src/command_modules/azure-cli-keyvault
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_lab&subdirectory=src/command_modules/azure-cli-lab
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_monitor&subdirectory=src/command_modules/azure-cli-monitor
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_network&subdirectory=src/command_modules/azure-cli-network
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_nspkg&subdirectory=src/azure-cli-nspkg
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_profile&subdirectory=src/command_modules/azure-cli-profile
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_redis&subdirectory=src/command_modules/azure-cli-redis
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_resource&subdirectory=src/command_modules/azure-cli-resource
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_role&subdirectory=src/command_modules/azure-cli-role
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_sql&subdirectory=src/command_modules/azure-cli-sql
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_storage&subdirectory=src/command_modules/azure-cli-storage
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_taskhelp&subdirectory=src/command_modules/azure-cli-taskhelp
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_testsdk&subdirectory=src/azure-cli-testsdk
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_utility_automation&subdirectory=scripts
-e git+https://github.com/Azure/azure-cli.git@6186044564dc3436b7551c58eda07db4412a49f8#egg=azure_cli_vm&subdirectory=src/command_modules/azure-cli-vm
azure-common==1.1.4
azure-core==1.24.2
azure-datalake-store==0.0.6
azure-graphrbac==0.30.0rc6
azure-keyvault==0.1.0
azure-mgmt-authorization==0.30.0rc6
azure-mgmt-batch==3.0.0
azure-mgmt-compute==0.33.1rc1
azure-mgmt-containerregistry==0.2.0
azure-mgmt-datalake-analytics==0.1.3
azure-mgmt-datalake-nspkg==3.0.1
azure-mgmt-datalake-store==0.1.3
azure-mgmt-dns==1.0.0
azure-mgmt-documentdb==0.1.1
azure-mgmt-iothub==0.2.1
azure-mgmt-keyvault==0.30.0
azure-mgmt-monitor==0.1.0
azure-mgmt-network==0.30.0
azure-mgmt-nspkg==3.0.2
azure-mgmt-redis==1.0.0
azure-mgmt-resource==0.30.2
azure-mgmt-sql==0.4.0
azure-mgmt-storage==0.31.0
azure-mgmt-trafficmanager==0.30.0rc6
azure-mgmt-web==0.31.0
azure-monitor==0.2.0
azure-nspkg==3.0.2
azure-storage==0.34.0
certifi==2021.5.30
cffi==1.15.1
colorama==0.3.7
coverage==4.2
cryptography==40.0.2
flake8==3.2.1
importlib-metadata==4.8.3
iniconfig==1.1.1
isodate==0.7.0
jeepney==0.7.1
jmespath==0.10.0
keyring==23.4.1
lazy-object-proxy==1.7.1
mccabe==0.5.3
mock==1.3.0
msrest==0.4.29
msrestazure==0.4.34
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
paramiko==2.0.2
pbr==6.1.1
pep8==1.7.1
pluggy==1.0.0
py==1.11.0
pyasn1==0.5.1
pycodestyle==2.2.0
pycparser==2.21
pyflakes==1.3.0
Pygments==2.1.3
PyJWT==2.4.0
pylint==1.5.4
pyOpenSSL==16.2.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
PyYAML==3.11
requests==2.9.1
requests-oauthlib==2.0.0
scp==0.15.0
SecretStorage==3.3.3
six==1.10.0
sshtunnel==0.4.0
tabulate==0.7.5
tomli==1.2.3
typing-extensions==4.1.1
urllib3==1.16
vcrpy==1.10.3
Whoosh==2.7.4
wrapt==1.16.0
xmltodict==0.14.2
zipp==3.6.0
| name: azure-cli
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- adal==0.4.3
- applicationinsights==0.10.0
- argcomplete==1.8.0
- astroid==1.4.9
- attrs==22.2.0
- autopep8==1.2.4
- azure-batch==2.0.0
- azure-common==1.1.4
- azure-core==1.24.2
- azure-datalake-store==0.0.6
- azure-graphrbac==0.30.0rc6
- azure-keyvault==0.1.0
- azure-mgmt-authorization==0.30.0rc6
- azure-mgmt-batch==3.0.0
- azure-mgmt-compute==0.33.1rc1
- azure-mgmt-containerregistry==0.2.0
- azure-mgmt-datalake-analytics==0.1.3
- azure-mgmt-datalake-nspkg==3.0.1
- azure-mgmt-datalake-store==0.1.3
- azure-mgmt-dns==1.0.0
- azure-mgmt-documentdb==0.1.1
- azure-mgmt-iothub==0.2.1
- azure-mgmt-keyvault==0.30.0
- azure-mgmt-monitor==0.1.0
- azure-mgmt-network==0.30.0
- azure-mgmt-nspkg==3.0.2
- azure-mgmt-redis==1.0.0
- azure-mgmt-resource==0.30.2
- azure-mgmt-sql==0.4.0
- azure-mgmt-storage==0.31.0
- azure-mgmt-trafficmanager==0.30.0rc6
- azure-mgmt-web==0.31.0
- azure-monitor==0.2.0
- azure-nspkg==3.0.2
- azure-storage==0.34.0
- cffi==1.15.1
- colorama==0.3.7
- coverage==4.2
- cryptography==40.0.2
- flake8==3.2.1
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- isodate==0.7.0
- jeepney==0.7.1
- jmespath==0.10.0
- keyring==23.4.1
- lazy-object-proxy==1.7.1
- mccabe==0.5.3
- mock==1.3.0
- msrest==0.4.29
- msrestazure==0.4.34
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- paramiko==2.0.2
- pbr==6.1.1
- pep8==1.7.1
- pip==9.0.1
- pluggy==1.0.0
- py==1.11.0
- pyasn1==0.5.1
- pycodestyle==2.2.0
- pycparser==2.21
- pyflakes==1.3.0
- pygments==2.1.3
- pyjwt==2.4.0
- pylint==1.5.4
- pyopenssl==16.2.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- pyyaml==3.11
- requests==2.9.1
- requests-oauthlib==2.0.0
- scp==0.15.0
- secretstorage==3.3.3
- setuptools==30.4.0
- six==1.10.0
- sshtunnel==0.4.0
- tabulate==0.7.5
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.16
- vcrpy==1.10.3
- whoosh==2.7.4
- wrapt==1.16.0
- xmltodict==0.14.2
- zipp==3.6.0
prefix: /opt/conda/envs/azure-cli
| [
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMAvailSetScenarioTest::test_vm_availset",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCreateNoneOptionsTest::test_vmss_create_none_options"
]
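Read together, this azure-cli record's setup fields shown earlier (`pre_install`, `install`, `test_cmd`) and the failing-test ids listed just above amount to a reproduction recipe. The following is a hypothetical sketch of applying them, assuming root privileges and the repository checked out at the base commit as the current working directory; the ordering and the choice of a single test id are assumptions, not part of the record:

```python
import subprocess

# Commands taken verbatim from this record's pre_install, install and test_cmd
# fields; the test id is the first failing-test entry listed above.
steps = [
    "apt-get update",
    "apt-get install -y gcc",
    "python scripts/dev_setup.py",
    "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider "
    "-W ignore::DeprecationWarning "
    "'src/command_modules/azure-cli-vm/tests/test_vm_commands.py::"
    "VMAvailSetScenarioTest::test_vm_availset'",
]
for cmd in steps:
    subprocess.run(cmd, shell=True, check=True)  # stop at the first failing step
```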
| [
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateLinuxSecretsScenarioTest::test_vm_create_linux_secrets",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateWindowsSecretsScenarioTest::test_vm_create_windows_secrets",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCreateLinuxSecretsScenarioTest::test_vmss_create_linux_secrets"
]
| [
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMImageListByAliasesScenarioTest::test_vm_image_list_by_alias",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMUsageScenarioTest::test_vm_usage",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMImageListThruServiceScenarioTest::test_vm_images_list_thru_services",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMOpenPortTest::test_vm_open_port",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMShowListSizesListIPAddressesScenarioTest::test_vm_show_list_sizes_list_ip_addresses",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSizeListScenarioTest::test_vm_size_list",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMImageListOffersScenarioTest::test_vm_image_list_offers",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMImageListPublishersScenarioTest::test_vm_image_list_publishers",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMImageListSkusScenarioTest::test_vm_image_list_skus",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMImageShowScenarioTest::test_vm_image_show",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMGeneralizeScenarioTest::test_vm_generalize",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateFromUnmanagedDiskTest::test_vm_create_from_unmanaged_disk",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateWithSpecializedUnmanagedDiskTest::test_vm_create_with_specialized_unmanaged_disk",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMManagedDiskScenarioTest::test_managed_disk",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateAndStateModificationsScenarioTest::test_vm_create_state_modifications",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMNoWaitScenarioTest::test_vm_create_no_wait",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMExtensionScenarioTest::test_vm_extension",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMMachineExtensionImageScenarioTest::test_vm_machine_extension_image",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMExtensionImageSearchScenarioTest::test_vm_extension_image_search",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateUbuntuScenarioTest::test_vm_create_ubuntu",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMMultiNicScenarioTest::test_vm_create_multi_nics",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateNoneOptionsTest::test_vm_create_none_options",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMBootDiagnostics::test_vm_boot_diagnostics",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSExtensionInstallTest::test_vmss_extension",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::DiagnosticsExtensionInstallTest::test_diagnostics_extension_install",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateExistingOptions::test_vm_create_existing_options",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateExistingIdsOptions::test_vm_create_existing_ids_options",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateCustomIP::test_vm_create_custom_ip",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMDiskAttachDetachTest::test_vm_disk_attach_detach",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMUnmanagedDataDiskTest::test_vm_data_unmanaged_disk",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMCreateCustomDataScenarioTest::test_vm_create_custom_data",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::AzureContainerServiceScenarioTest::test_acs_create_update",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCreateAndModify::test_vmss_create_and_modify",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCreateOptions::test_vmss_create_options",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCreateExistingOptions::test_vmss_create_existing_options",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCreateExistingIdsOptions::test_vmss_create_existing_ids_options",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSVMsScenarioTest::test_vmss_vms",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSCustomDataScenarioTest::test_vmss_create_custom_data",
"src/command_modules/azure-cli-vm/tests/test_vm_commands.py::VMSSNicScenarioTest::test_vmss_nics"
]
| []
| MIT License | 1,154 | [
"azure-cli.pyproj",
"src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/custom.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py",
"src/command_modules/azure-cli-acs/HISTORY.rst",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_help.py",
"src/command_modules/azure-cli-vm/HISTORY.rst",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py"
]
| [
"azure-cli.pyproj",
"src/command_modules/azure-cli-acs/azure/cli/command_modules/acs/custom.py",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/custom.py",
"src/command_modules/azure-cli-acs/HISTORY.rst",
"src/command_modules/azure-cli-vm/azure/cli/command_modules/vm/_help.py",
"src/command_modules/azure-cli-vm/HISTORY.rst",
"src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/_help.py"
]
|
ARMmbed__yotta-804 | 4094b7a26c66dd64ff724d4f72da282d41ea9fca | 2017-04-06 15:06:03 | 2575c2f7cd0977b5df2347223738629d28e5310b | diff --git a/yotta/lib/sourceparse.py b/yotta/lib/sourceparse.py
index 0f451ad..eb1f0b4 100644
--- a/yotta/lib/sourceparse.py
+++ b/yotta/lib/sourceparse.py
@@ -57,7 +57,7 @@ def _getNonRegistryRef(source_url):
# something/something#spec = github
# something/something@spec = github
# something/something spec = github
- github_match = re.match('^([.a-z0-9_-]+/([.a-z0-9_-]+)) *[@#]?([.a-z0-9_\-\*\^\~\>\<\=]*)$', source_url, re.I)
+ github_match = re.match(r'^([.a-z0-9_-]+/([.a-z0-9_-]+)) *[@#]?([^/:\?\[\\]*)$', source_url, re.I)
if github_match:
return github_match.group(2), VersionSource('github', github_match.group(1), github_match.group(3))
| Semver incompatibility
Hi,
It seems the recent version has broken semantic versioning for any previous version, which we heavily use in our project, [microbit-dal](https://github.com/lancaster-university/microbit-dal).
We have had two new users on v18 who have reported this breakage: https://github.com/lancaster-university/microbit-dal/issues/282
Any help would be greatly appreciated :smile: | ARMmbed/yotta | diff --git a/yotta/test/test_sourceparse.py b/yotta/test/test_sourceparse.py
index 0b7af6f..2c421ba 100644
--- a/yotta/test/test_sourceparse.py
+++ b/yotta/test/test_sourceparse.py
@@ -47,6 +47,7 @@ Git_Specs = [
'~1.2.3',
'-1.2.3',
'branch-or-tag-name',
+ 'branch+or+tag+name',
'd5f5049',
]
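To make the regex change in the patch above concrete, here is a minimal sketch (not part of the patch) that applies both the old and the new pattern from `yotta/lib/sourceparse.py` to a shorthand ref whose spec contains `+`, the case exercised by the new `Git_Specs` entry in the test diff above; the ref string itself is illustrative.

```python
import re

# Spec character classes before and after the fix in yotta/lib/sourceparse.py:
# the old pattern whitelists only semver-style punctuation, so a '+' anywhere in
# the branch/tag/build-metadata spec makes the whole shorthand fail to parse.
OLD = r'^([.a-z0-9_-]+/([.a-z0-9_-]+)) *[@#]?([.a-z0-9_\-\*\^\~\>\<\=]*)$'
NEW = r'^([.a-z0-9_-]+/([.a-z0-9_-]+)) *[@#]?([^/:\?\[\\]*)$'

ref = 'lancaster-university/microbit-dal#branch+or+tag+name'  # illustrative ref

print(bool(re.match(OLD, ref, re.I)))  # False: '+' is not in the old spec class
print(bool(re.match(NEW, ref, re.I)))  # True: the new class only excludes / : ? [ \
```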
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 1
},
"num_modified_files": 1
} | 0.18 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[develop]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.9",
"reqs_path": [
"requirements/base.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | argcomplete==1.12.3
certifi==2025.1.31
cffi==1.17.1
charset-normalizer==3.4.1
colorama==0.3.9
cryptography==44.0.2
Deprecated==1.2.18
exceptiongroup==1.2.2
future==1.0.0
hgapi==1.7.4
idna==3.10
iniconfig==2.1.0
intelhex==2.3.0
intervaltree==3.1.0
Jinja2==2.11.3
jsonpointer==1.14
jsonschema==2.6.0
MarkupSafe==3.0.2
mbed_test_wrapper==1.0.0
packaging==24.2
pathlib==1.0.1
pluggy==1.5.0
project-generator-definitions==0.2.46
project_generator==0.8.17
pycparser==2.22
pyelftools==0.23
PyGithub==1.54.1
PyJWT==1.7.1
pyocd==0.15.0
pytest==8.3.5
pyusb==1.3.1
PyYAML==3.13
requests==2.32.3
semantic-version==2.10.0
six==1.17.0
sortedcontainers==2.4.0
tomli==2.2.1
urllib3==2.3.0
valinor==0.0.15
websocket-client==1.8.0
wrapt==1.17.2
xmltodict==0.14.2
-e git+https://github.com/ARMmbed/yotta.git@4094b7a26c66dd64ff724d4f72da282d41ea9fca#egg=yotta
| name: yotta
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- setuptools=75.8.0=py39h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- tzdata=2025a=h04d1e81_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- argcomplete==1.12.3
- argparse==1.4.0
- certifi==2025.1.31
- cffi==1.17.1
- charset-normalizer==3.4.1
- colorama==0.3.9
- cryptography==44.0.2
- deprecated==1.2.18
- exceptiongroup==1.2.2
- future==1.0.0
- hgapi==1.7.4
- idna==3.10
- iniconfig==2.1.0
- intelhex==2.3.0
- intervaltree==3.1.0
- jinja2==2.11.3
- jsonpointer==1.14
- jsonschema==2.6.0
- markupsafe==3.0.2
- mbed-test-wrapper==1.0.0
- packaging==24.2
- pathlib==1.0.1
- pluggy==1.5.0
- project-generator==0.8.17
- project-generator-definitions==0.2.46
- pycparser==2.22
- pyelftools==0.23
- pygithub==1.54.1
- pyjwt==1.7.1
- pyocd==0.15.0
- pytest==8.3.5
- pyusb==1.3.1
- pyyaml==3.13
- requests==2.32.3
- semantic-version==2.10.0
- six==1.17.0
- sortedcontainers==2.4.0
- tomli==2.2.1
- urllib3==2.3.0
- valinor==0.0.15
- websocket-client==1.8.0
- wrapt==1.17.2
- xmltodict==0.14.2
prefix: /opt/conda/envs/yotta
| [
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_shorthandURLs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_ShorthandRefs"
]
| []
| [
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_gitURLs",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_githubURLs",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_hgURLs",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_invalid",
"yotta/test/test_sourceparse.py::TestParseSourceURL::test_registryURLs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_GitRefs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_GithubRefs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_HGRefs",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_atVersion",
"yotta/test/test_sourceparse.py::TestParseModuleNameAndSpec::test_validNames"
]
| []
| Apache License 2.0 | 1,155 | [
"yotta/lib/sourceparse.py"
]
| [
"yotta/lib/sourceparse.py"
]
|
|
smarter-travel-media__warthog-16 | ccc611dd6f05482fdb81348bb972eea06d456dc7 | 2017-04-06 16:10:22 | 0748fe97aff223b7f5cac008c856e3273b6a0343 | diff --git a/doc/source/changes.rst b/doc/source/changes.rst
index aa5b3f1..0c2907a 100644
--- a/doc/source/changes.rst
+++ b/doc/source/changes.rst
@@ -2,6 +2,16 @@ Changelog
=========
+1.999.1 - 2017-04-06
+---------------------
+.. note::
+
+ This version is only meant to be used internally at SmarterTravel as a transition
+ step between two load balancers. As such, it is not available on PyPI.
+
+* Add support for new Python SSL constant ``PROTOCOL_TLS`` to allow negotiation of SSL
+ versions between the client and server.
+
1.999.0 - 2017-04-05
--------------------
.. note::
diff --git a/warthog/__init__.py b/warthog/__init__.py
index 9af2178..6d1434b 100644
--- a/warthog/__init__.py
+++ b/warthog/__init__.py
@@ -14,4 +14,4 @@ warthog
Simple client for A10 load balancers.
"""
-__version__ = '1.999.0'
+__version__ = '1.999.1'
diff --git a/warthog/ssl.py b/warthog/ssl.py
index dea969f..92b4b3d 100644
--- a/warthog/ssl.py
+++ b/warthog/ssl.py
@@ -22,7 +22,7 @@ SSL related constants used by Warthog
PROTOCOL_SSLv3 = 1
-PROTOCOL_SSLv23 = 2
+PROTOCOL_TLS = PROTOCOL_SSLv23 = 2
PROTOCOL_TLSv1 = 3
| Unsupported TLS versions
Turns out that even if you manage to use a different SSL version, it still needs to be supported by the Python `ssl` module. Who knew? To this end we're going to add the new `PROTOCOL_TLS` constant (effectively the same as `PROTOCOL_SSLv23`) and use that to allow negotiation between the LB and our ancient Python version. | smarter-travel-media/warthog | diff --git a/test/test_ssl.py b/test/test_ssl.py
index 9165683..1b3ebf3 100644
--- a/test/test_ssl.py
+++ b/test/test_ssl.py
@@ -21,6 +21,24 @@ def test_tls1_matches():
assert ssl.PROTOCOL_TLSv1 == warthog.ssl.PROTOCOL_TLSv1
+def test_tls_matches_ssl23():
+ # New constant in Python 2.7.13 for negotiation of the highest
+ # supported protocol. Same value as the previous "negotiate"
+ # constant (SSLv23).
+ assert ssl.PROTOCOL_SSLv23 == warthog.ssl.PROTOCOL_TLS
+
+
+def test_tls_matches():
+ try:
+ # It's possible that we're running under an old version of Python
+ # and this constant doesn't exist (hence why warthog.ssl exists).
+ module_const = ssl.PROTOCOL_TLS
+ except AttributeError:
+ return
+
+ assert module_const == warthog.ssl.PROTOCOL_TLS
+
+
def test_tls1_1_matches():
try:
# It's possible that we're running under an old version of Python
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 3
} | 1.999 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.4",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
click==6.7
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.11.1
tomli==1.2.3
typing_extensions==4.1.1
-e git+https://github.com/smarter-travel-media/warthog.git@ccc611dd6f05482fdb81348bb972eea06d456dc7#egg=warthog
zipp==3.6.0
| name: warthog
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- click==6.7
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.11.1
- tomli==1.2.3
- typing-extensions==4.1.1
- zipp==3.6.0
prefix: /opt/conda/envs/warthog
| [
"test/test_ssl.py::test_tls_matches_ssl23",
"test/test_ssl.py::test_tls_matches"
]
| [
"test/test_ssl.py::test_ssl3_matches"
]
| [
"test/test_ssl.py::test_ssl23_matches",
"test/test_ssl.py::test_tls1_matches",
"test/test_ssl.py::test_tls1_1_matches",
"test/test_ssl.py::test_tls1_2_matches"
]
| []
| MIT License | 1,156 | [
"warthog/ssl.py",
"warthog/__init__.py",
"doc/source/changes.rst"
]
| [
"warthog/ssl.py",
"warthog/__init__.py",
"doc/source/changes.rst"
]
|
|
dssg__triage-89 | 259af31519000bc2bc94f9300eaf4bc596631551 | 2017-04-06 16:59:14 | 478ac2e52e0b074c262eb9fadf25c3ff598cb911 | codecov-io: # [Codecov](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=h1) Report
> Merging [#89](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=desc) into [master](https://codecov.io/gh/dssg/triage/commit/259af31519000bc2bc94f9300eaf4bc596631551?src=pr&el=desc) will **decrease** coverage by `0.01%`.
> The diff coverage is `88.88%`.
[](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=tree)
```diff
@@ Coverage Diff @@
## master #89 +/- ##
==========================================
- Coverage 89.74% 89.72% -0.02%
==========================================
Files 22 22
Lines 946 954 +8
==========================================
+ Hits 849 856 +7
- Misses 97 98 +1
```
| [Impacted Files](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=tree) | Coverage Δ | |
|---|---|---|
| [triage/feature\_group\_creator.py](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=tree#diff-dHJpYWdlL2ZlYXR1cmVfZ3JvdXBfY3JlYXRvci5weQ==) | `92.85% <ø> (ø)` | :arrow_up: |
| [triage/feature\_group\_mixer.py](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=tree#diff-dHJpYWdlL2ZlYXR1cmVfZ3JvdXBfbWl4ZXIucHk=) | `96.55% <88.88%> (-3.45%)` | :arrow_down: |
------
[Continue to review full report at Codecov](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=continue).
> **Legend** - [Click here to learn more](https://docs.codecov.io/docs/codecov-delta)
> `Δ = absolute <relative> (impact)`, `ø = not affected`, `? = missing data`
> Powered by [Codecov](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=footer). Last update [259af31...fdd0c66](https://codecov.io/gh/dssg/triage/pull/89?src=pr&el=lastupdated). Read the [comment docs](https://docs.codecov.io/docs/pull-request-comments). | diff --git a/example_experiment_config.yaml b/example_experiment_config.yaml
index 2b5cd470..5eb574d3 100644
--- a/example_experiment_config.yaml
+++ b/example_experiment_config.yaml
@@ -98,7 +98,7 @@ feature_group_definition:
tables: ['prefix_entity_id']
# strategies for generating combinations of groups
-# available: all, leave-one-out
+# available: all, leave-one-out, leave-one-in
feature_group_strategies: ['all']
diff --git a/triage/db.py b/triage/db.py
index 855a992c..734ef978 100644
--- a/triage/db.py
+++ b/triage/db.py
@@ -52,8 +52,8 @@ class ModelGroup(Base):
model_group_id = Column(Integer, primary_key=True)
model_type = Column(Text)
model_parameters = Column(JSONB)
- prediction_window = Column(Text)
feature_list = Column(ARRAY(Text))
+ model_config = Column(JSONB)
class Model(Base):
diff --git a/triage/feature_group_creator.py b/triage/feature_group_creator.py
index 95c82033..0c60dfc5 100644
--- a/triage/feature_group_creator.py
+++ b/triage/feature_group_creator.py
@@ -20,6 +20,7 @@ def all_subsetter(config_item, table, features):
class FeatureGroupCreator(object):
+ """Divides a feature dictionary into groups based on given criteria"""
subsetters = {
'tables': table_subsetter,
'prefix': prefix_subsetter,
@@ -45,7 +46,15 @@ class FeatureGroupCreator(object):
Args:
feature_dictionary (dict) tables and the features contained in each
- Returns: (list) subsets of the feature dictionary
+ The feature dictionary is meant to be keyed on source table. Example:
+
+ {
+ 'feature_table_one': ['feature_one', feature_two'],
+ 'feature_table_two': ['feature_three', 'feature_four'],
+ }
+
+ Returns: (list) subsets of the feature dictionary, in the same
+ table-based structure
"""
subsets = []
for name, config in sorted(self.definition.items()):
diff --git a/triage/feature_group_mixer.py b/triage/feature_group_mixer.py
index a04fa900..ff8f6cfe 100644
--- a/triage/feature_group_mixer.py
+++ b/triage/feature_group_mixer.py
@@ -1,3 +1,14 @@
+def leave_one_in(feature_groups):
+ """For each group, return a copy of just that group
+
+ Args:
+ feature_groups (list) The feature groups to apply the strategy to
+
+ Returns: A list of feature dicts
+ """
+ return feature_groups
+
+
def leave_one_out(feature_groups):
"""For each group, return a copy of all groups excluding that group
@@ -18,7 +29,17 @@ def leave_one_out(feature_groups):
def all_features(feature_groups):
- return feature_groups
+ """Return a combination of all feature groups
+
+ Args:
+ feature_groups (list) The feature groups to apply the strategy to
+
+ Returns: A list of feature dicts
+ """
+ feature_dict = {}
+ for group in feature_groups:
+ feature_dict.update(group)
+ return [feature_dict]
class FeatureGroupMixer(object):
@@ -26,10 +47,14 @@ class FeatureGroupMixer(object):
based on a list of strategies"""
strategy_lookup = {
'leave-one-out': leave_one_out,
+ 'leave-one-in': leave_one_in,
'all': all_features,
}
def __init__(self, strategies):
+ for strategy in strategies:
+ if strategy not in self.strategy_lookup:
+ raise ValueError('Unknown strategy "{}"'.format(strategy))
self.strategies = strategies
def generate(self, feature_groups):
diff --git a/triage/model_group_stored_procedure.sql b/triage/model_group_stored_procedure.sql
index 84a6d9b7..8d3b2a5a 100644
--- a/triage/model_group_stored_procedure.sql
+++ b/triage/model_group_stored_procedure.sql
@@ -12,9 +12,9 @@ CREATE TABLE results.model_groups
-----------
populates the table and returns the IDs
*/
-CREATE OR REPLACE FUNCTION get_model_group_id(in_model_type TEXT, in_model_parameters JSONB,
- in_prediction_window TEXT,
- in_feature_list TEXT [])
+CREATE OR REPLACE FUNCTION public.get_model_group_id(in_model_type TEXT, in_model_parameters JSONB,
+ in_feature_list TEXT [],
+ in_model_config JSONB)
RETURNS INTEGER AS
$BODY$
DECLARE
@@ -28,12 +28,14 @@ BEGIN
INTO model_group_return_id
FROM results.model_groups
WHERE
- model_type = in_model_type AND model_parameters = in_model_parameters AND prediction_window = in_prediction_window
- AND feature_list = ARRAY(Select unnest(in_feature_list) ORDER BY 1);
+ model_type = in_model_type
+ AND model_parameters = in_model_parameters
+ AND feature_list = ARRAY(Select unnest(in_feature_list) ORDER BY 1)
+ AND model_config = in_model_config ;
IF NOT FOUND
THEN
- INSERT INTO results.model_groups (model_group_id, model_type, model_parameters, prediction_window, feature_list)
- VALUES (DEFAULT, in_model_type, in_model_parameters, in_prediction_window, ARRAY(Select unnest(in_feature_list) ORDER BY 1))
+ INSERT INTO results.model_groups (model_group_id, model_type, model_parameters, feature_list, model_config)
+ VALUES (DEFAULT, in_model_type, in_model_parameters, ARRAY(Select unnest(in_feature_list) ORDER BY 1), in_model_config)
RETURNING model_group_id
INTO model_group_return_id;
END IF;
diff --git a/triage/model_trainers.py b/triage/model_trainers.py
index 38d9bd18..6da11474 100644
--- a/triage/model_trainers.py
+++ b/triage/model_trainers.py
@@ -10,7 +10,6 @@ import pandas
import warnings
from triage.utils import filename_friendly_hash
-
def get_feature_importances(model):
"""
Get feature importances (from scikit-learn) of trained model.
@@ -168,10 +167,10 @@ class ModelTrainer(object):
rankings_pct):
feature_importance = FeatureImportance(
model=model,
- feature_importance=importance,
+ feature_importance=round(float(importance), 10),
feature=feature_names[feature_index],
rank_abs=int(rank_abs),
- rank_pct=float(rank_pct)
+ rank_pct=round(float(rank_pct), 10)
)
session.add(feature_importance)
session.commit()
@@ -207,8 +206,8 @@ class ModelTrainer(object):
model_group_id = self._get_model_group_id(
class_path,
parameters,
- matrix_store.metadata['prediction_window'],
- matrix_store.metadata['feature_names']
+ matrix_store.metadata['feature_names'],
+ matrix_store.metadata.get('model_config', dict())
)
logging.debug('Trained model')
model_store.write(trained_model)
@@ -229,8 +228,8 @@ class ModelTrainer(object):
self,
class_path,
parameters,
- prediction_window,
- feature_names
+ feature_names,
+ model_config
):
"""
Returns model group id using store procedure 'get_model_group_id' which will
@@ -240,9 +239,8 @@ class ModelTrainer(object):
Args:
class_path (string) A full classpath to the model class
parameters (dict) hyperparameters to give to the model constructor
- prediction_window (string) The prediction window used for generating the labels
- stored in metadata
- features_names (list) Features used for train/test
+ features_names (list) Features used for train/test
+ model_config (dict) Dictionary of the classes for comparing groups
Returns: (int) a database id for the model group id
"""
@@ -259,12 +257,13 @@ class ModelTrainer(object):
query = ("SELECT get_model_group_id( "
" '{class_path}'::TEXT, "
" '{parameters}'::JSONB, "
- " '{prediction_window}'::TEXT, "
- " ARRAY{feature_names}::TEXT [] )"
+ " ARRAY{feature_names}::TEXT [] , "
+ " '{model_config}'::JSONB )"
.format(class_path=class_path,
parameters=json.dumps(parameters),
- prediction_window=prediction_window,
- feature_names=feature_names))
+ feature_names=feature_names,
+ model_config=json.dumps(model_config, sort_keys=True)))
+
cur.execute(query)
db_conn.commit()
model_group_id = cur.fetchone()
diff --git a/triage/predictors.py b/triage/predictors.py
index 26414da7..825e8861 100644
--- a/triage/predictors.py
+++ b/triage/predictors.py
@@ -113,10 +113,10 @@ class Predictor(object):
model_id=int(model_id),
entity_id=int(entity_id),
as_of_date=matrix_end_time,
- score=float(score),
+ score=round(float(score), 10),
label_value=int(label) if not math.isnan(label) else None,
rank_abs=int(rank_abs),
- rank_pct=float(rank_pct),
+ rank_pct=round(float(rank_pct),10),
**misc_db_parameters
))
| Add more feature subsetting strategies
According to @rayidghani, we can't use leave-one-out without leave-one-in, so we should fill out the strategies now. | dssg/triage | diff --git a/tests/test_feature_group_mixer.py b/tests/test_feature_group_mixer.py
index 2edeb1bb..a369388e 100644
--- a/tests/test_feature_group_mixer.py
+++ b/tests/test_feature_group_mixer.py
@@ -16,3 +16,31 @@ def test_feature_group_mixer_leave_one_out():
dict(itertools.chain(english_numbers.items(), letters.items())),
]
assert result == expected
+
+
+def test_feature_group_mixer_leave_one_in():
+ english_numbers = {'one': ['two', 'three'], 'four': ['five', 'six']}
+ letters = {'a': ['b', 'c'], 'd': ['e', 'f']}
+ german_numbers = {'eins': ['zwei', 'drei'], 'vier': ['funf', 'sechs']}
+ feature_groups = [english_numbers, letters, german_numbers]
+
+ result = FeatureGroupMixer(['leave-one-in']).generate(feature_groups)
+ expected = [
+ english_numbers,
+ letters,
+ german_numbers
+ ]
+ assert result == expected
+
+
+def test_feature_group_mixer_all():
+ english_numbers = {'one': ['two', 'three'], 'four': ['five', 'six']}
+ letters = {'a': ['b', 'c'], 'd': ['e', 'f']}
+ german_numbers = {'eins': ['zwei', 'drei'], 'vier': ['funf', 'sechs']}
+ feature_groups = [english_numbers, letters, german_numbers]
+
+ result = FeatureGroupMixer(['all']).generate(feature_groups)
+ expected = [
+ dict(itertools.chain(english_numbers.items(), letters.items(), german_numbers.items())),
+ ]
+ assert result == expected
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 7
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest_v2",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc libblas-dev liblapack-dev libatlas-base-dev gfortran"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "py.test -vvv -s --cov=triage"
} | attrs==22.2.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
click==8.0.4
collate==0.3.0
coverage==6.2
GeoAlchemy2==0.11.1
greenlet==2.0.2
importlib-metadata==4.8.3
inflection==0.5.1
iniconfig==1.1.1
jmespath==0.10.0
joblib==1.1.1
numexpr==2.8.1
numpy==1.19.5
packaging==21.3
pandas==1.1.5
pluggy==1.0.0
psycopg2==2.7.7
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
pytz==2025.2
PyYAML==6.0.1
s3transfer==0.5.2
scikit-learn==0.24.2
scipy==1.5.4
six==1.17.0
sklearn==0.0
SQLAlchemy==1.4.54
tables==3.7.0
threadpoolctl==3.1.0
tomli==1.2.3
-e git+https://github.com/dssg/triage.git@259af31519000bc2bc94f9300eaf4bc596631551#egg=triage
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: triage
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- boto3==1.23.10
- botocore==1.26.10
- click==8.0.4
- collate==0.3.0
- coverage==6.2
- geoalchemy2==0.11.1
- greenlet==2.0.2
- importlib-metadata==4.8.3
- inflection==0.5.1
- iniconfig==1.1.1
- jmespath==0.10.0
- joblib==1.1.1
- numexpr==2.8.1
- numpy==1.19.5
- packaging==21.3
- pandas==1.1.5
- pluggy==1.0.0
- psycopg2==2.7.7
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pytz==2025.2
- pyyaml==6.0.1
- s3transfer==0.5.2
- scikit-learn==0.24.2
- scipy==1.5.4
- six==1.17.0
- sklearn==0.0
- sqlalchemy==1.4.54
- tables==3.7.0
- threadpoolctl==3.1.0
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/triage
| [
"tests/test_feature_group_mixer.py::test_feature_group_mixer_leave_one_in",
"tests/test_feature_group_mixer.py::test_feature_group_mixer_all"
]
| []
| [
"tests/test_feature_group_mixer.py::test_feature_group_mixer_leave_one_out"
]
| []
| MIT License | 1,157 | [
"triage/db.py",
"triage/model_group_stored_procedure.sql",
"example_experiment_config.yaml",
"triage/feature_group_mixer.py",
"triage/predictors.py",
"triage/feature_group_creator.py",
"triage/model_trainers.py"
]
| [
"triage/db.py",
"triage/model_group_stored_procedure.sql",
"example_experiment_config.yaml",
"triage/feature_group_mixer.py",
"triage/predictors.py",
"triage/feature_group_creator.py",
"triage/model_trainers.py"
]
|
joke2k__faker-494 | d06d05f415e97b15f21683c991511c24e12c8304 | 2017-04-06 18:16:03 | c12a23f112265bf051d720a3758f9919631734ab | diff --git a/faker/providers/file/__init__.py b/faker/providers/file/__init__.py
index b19a3c56..4f4c14ce 100644
--- a/faker/providers/file/__init__.py
+++ b/faker/providers/file/__init__.py
@@ -201,3 +201,16 @@ class Provider(BaseProvider):
"""
category = category if category else cls.random_element(list(cls.file_extensions.keys()))
return cls.random_element(cls.file_extensions[category])
+
+ @classmethod
+ def file_path(cls, depth=1, category=None, extension=None):
+ """
+ :param category: audio|image|office|text|video
+ :param extension: file extension
+ :param depth: depth of the file (depth >= 0)
+ """
+ file = Provider.file_name(category, extension)
+ path = "/{0}".format(file)
+ for d in range(0, depth):
+ path = "/{0}{1}".format(WordProvider.word(), path)
+ return path
| Add a file path provider
In the file providers it would be nice to have a file_path provider that would return a path like ```/lorem/ipsum/lorem.pdf```; a usage sketch follows the test patch below. | joke2k/faker | diff --git a/tests/providers/file.py b/tests/providers/file.py
new file mode 100644
index 00000000..1a1617ba
--- /dev/null
+++ b/tests/providers/file.py
@@ -0,0 +1,25 @@
+from __future__ import unicode_literals
+
+import unittest
+import re
+
+from faker import Factory
+from faker.providers.file import Provider as FileProvider
+
+
+class TestFile(unittest.TestCase):
+ """ Tests file """
+
+ def setUp(self):
+ self.factory = Factory.create()
+
+ def test_file_path(self):
+ for _ in range(100):
+ file_path = FileProvider.file_path()
+ self.assertTrue(re.search(r'\/\w+\/\w+\.\w+', file_path))
+ file_path = FileProvider.file_path(depth=3)
+ self.assertTrue(re.search(r'\/\w+\/\w+\/\w+\.\w+', file_path))
+ file_path = FileProvider.file_path(extension='pdf')
+ self.assertTrue(re.search(r'\/\w+\/\w+\.pdf', file_path))
+ file_path = FileProvider.file_path(category='image')
+ self.assertTrue(re.search(r'\/\w+\/\w+\.(bmp|gif|jpeg|jpg|png|tiff)', file_path))
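For reference, once a provider like the one in the patch above is available on a `Faker` instance, usage would look roughly like this (output values are illustrative, not captured from a real run):

```python
from faker import Faker

fake = Faker()
print(fake.file_path())                            # e.g. /lorem/ipsum.pdf
print(fake.file_path(depth=3, category='image'))   # e.g. /dolor/sit/amet/lorem.png
print(fake.file_path(extension='pdf'))             # e.g. /consectetur/adipiscing.pdf
```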
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 1
} | 0.7 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": [
"tests/requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
dnspython==2.2.1
email-validator==1.3.1
-e git+https://github.com/joke2k/faker.git@d06d05f415e97b15f21683c991511c24e12c8304#egg=Faker
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
mock==1.0.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
python-dateutil==2.9.0.post0
six==1.17.0
tomli==1.2.3
typing_extensions==4.1.1
UkPostcodeParser==1.0.3
zipp==3.6.0
| name: faker
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- dnspython==2.2.1
- email-validator==1.3.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- mock==1.0.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-dateutil==2.9.0.post0
- six==1.17.0
- tomli==1.2.3
- typing-extensions==4.1.1
- ukpostcodeparser==1.0.3
- zipp==3.6.0
prefix: /opt/conda/envs/faker
| [
"tests/providers/file.py::TestFile::test_file_path"
]
| []
| []
| []
| MIT License | 1,158 | [
"faker/providers/file/__init__.py"
]
| [
"faker/providers/file/__init__.py"
]
|
|
minio__minio-py-501 | cbe6189d5ee7730931252ac6da2e2fe9af1c9b89 | 2017-04-07 06:35:56 | 41240393e679226942d2300f281602468b20c7b2 | diff --git a/docs/API.md b/docs/API.md
index 993c435..2e77e33 100644
--- a/docs/API.md
+++ b/docs/API.md
@@ -240,7 +240,6 @@ __Return Value__
|``object.last_modified`` |_datetime.datetime_ | modified time stamp. |
-
__Example__
```py
@@ -544,14 +543,15 @@ __Parameters__
|``suffix`` | _string_ | Object key suffix to filter notifications for. |
|``events`` | _list_ | Enables notifications for specific event types. |
-See [here](../examples/listen_notification.py) for a full example.
+See [here](https://raw.githubusercontent.com/minio/minio-py/master/examples/listen_notification.py) for a full example.
```py
# Put a file with default content-type.
events = minioClient.listen_bucket_notification('my-bucket', 'my-prefix/',
'.my-suffix',
['s3:ObjectCreated:*',
- 's3:ObjectRemoved:*'])
+ 's3:ObjectRemoved:*',
+ 's3:ObjectAccessed:*'])
for event in events:
print event
```
diff --git a/examples/listen_notification.py b/examples/listen_notification.py
index d4855e3..0eb309f 100644
--- a/examples/listen_notification.py
+++ b/examples/listen_notification.py
@@ -27,6 +27,7 @@ client = Minio('play.minio.io:9000',
events = client.listen_bucket_notification('my-bucket', 'my-prefix/',
'.my-suffix',
['s3:ObjectCreated:*',
- 's3:ObjectRemoved:*'])
+ 's3:ObjectRemoved:*',
+ 's3:ObjectAccessed:*'])
for event in events:
print(event)
diff --git a/minio/api.py b/minio/api.py
index 13198af..4646e78 100644
--- a/minio/api.py
+++ b/minio/api.py
@@ -485,7 +485,8 @@ class Minio(object):
def listen_bucket_notification(self, bucket_name, prefix='', suffix='',
events=['s3:ObjectCreated:*',
- 's3:ObjectRemoved:*']):
+ 's3:ObjectRemoved:*',
+ 's3:ObjectAccessed:*']):
"""
Yeilds new event notifications on a bucket, caller should iterate
to read new notifications.
@@ -504,7 +505,7 @@ class Minio(object):
url_components = urlsplit(self._endpoint_url)
if url_components.hostname == 's3.amazonaws.com':
raise InvalidArgumentError(
- 'Listening for event notifications on a bucket is a Minio '
+ 'Listening for event notifications on a bucket is a Minio '
'specific extension to bucket notification API. It is not '
'supported by Amazon S3')
@@ -1082,7 +1083,7 @@ class Minio(object):
'Content-Md5': get_md5_base64digest(content),
'Content-Length': len(content)
}
- query = {"delete": None}
+ query = {'delete': ''}
content_sha256_hex = get_sha256_hexdigest(content)
# send multi-object delete request
@@ -1778,7 +1779,7 @@ class Minio(object):
:return: location of bucket name is returned.
"""
method = 'GET'
- url = self._endpoint_url + '/' + bucket_name + '?location'
+ url = self._endpoint_url + '/' + bucket_name + '?location='
headers = {}
# default for all requests.
region = 'us-east-1'
diff --git a/minio/signer.py b/minio/signer.py
index 5cebf5e..b514cb3 100644
--- a/minio/signer.py
+++ b/minio/signer.py
@@ -207,6 +207,7 @@ def sign_v4(method, url, region, headers=None, access_key=None,
headers_to_sign,
signed_headers,
content_sha256)
+
string_to_sign = generate_string_to_sign(date, region,
canonical_req)
signing_key = generate_signing_key(date, region, secret_key)
@@ -232,16 +233,7 @@ def generate_canonical_request(method, parsed_url, headers, signed_headers, cont
:param headers: HTTP header dictionary.
:param content_sha256: Content sha256 hexdigest string.
"""
- lines = [method, parsed_url.path]
-
- # Parsed query.
- split_query = parsed_url.query.split('&')
- split_query.sort()
- for i in range(0, len(split_query)):
- if len(split_query[i]) > 0 and '=' not in split_query[i]:
- split_query[i] += '='
- query = '&'.join(split_query)
- lines.append(query)
+ lines = [method, parsed_url.path, parsed_url.query]
# Headers added to canonical request.
header_lines = []
| ListenBucketNotification should also support Get and Head
Depends on minio/minio#3916 | minio/minio-py | diff --git a/tests/unit/remove_objects_test.py b/tests/unit/remove_objects_test.py
index bfbf592..1142523 100644
--- a/tests/unit/remove_objects_test.py
+++ b/tests/unit/remove_objects_test.py
@@ -56,7 +56,7 @@ class RemoveObjectsTest(TestCase):
mock_connection.return_value = mock_server
mock_server.mock_add_request(
MockResponse('POST',
- 'https://localhost:9000/hello/?delete',
+ 'https://localhost:9000/hello/?delete=',
{'Content-Length': 95,
'User-Agent': _DEFAULT_USER_AGENT,
'Content-Md5': u'5Tg5SmU9Or43L4+iIyfPrQ=='}, 200,
@@ -72,7 +72,7 @@ class RemoveObjectsTest(TestCase):
mock_connection.return_value = mock_server
mock_server.mock_add_request(
MockResponse('POST',
- 'https://localhost:9000/hello/?delete',
+ 'https://localhost:9000/hello/?delete=',
{'Content-Length': 95,
'User-Agent': _DEFAULT_USER_AGENT,
'Content-Md5': u'5Tg5SmU9Or43L4+iIyfPrQ=='}, 200,
@@ -88,7 +88,7 @@ class RemoveObjectsTest(TestCase):
mock_connection.return_value = mock_server
mock_server.mock_add_request(
MockResponse('POST',
- 'https://localhost:9000/hello/?delete',
+ 'https://localhost:9000/hello/?delete=',
{'Content-Length': 95,
'User-Agent': _DEFAULT_USER_AGENT,
'Content-Md5': u'5Tg5SmU9Or43L4+iIyfPrQ=='}, 200,
diff --git a/tests/unit/sign_test.py b/tests/unit/sign_test.py
index 3ccbb84..1f71238 100644
--- a/tests/unit/sign_test.py
+++ b/tests/unit/sign_test.py
@@ -54,7 +54,7 @@ class CanonicalRequestTest(TestCase):
def test_request_with_query(self):
url = urlsplit('http://localhost:9000/hello?c=d&e=f&a=b')
expected_signed_headers = ['x-amz-content-sha256', 'x-amz-date']
- expected_request_array = ['PUT', '/hello', 'a=b&c=d&e=f',
+ expected_request_array = ['PUT', '/hello', 'c=d&e=f&a=b',
'x-amz-content-sha256:' + empty_hash,
'x-amz-date:dateString',
'', ';'.join(expected_signed_headers),
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 3
},
"num_modified_files": 4
} | 2.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"nose",
"mock",
"pytest"
],
"pre_install": null,
"python": "3.5",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
-e git+https://github.com/minio/minio-py.git@cbe6189d5ee7730931252ac6da2e2fe9af1c9b89#egg=minio
mock==5.2.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
nose==1.3.7
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
pytz==2025.2
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: minio-py
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- mock==5.2.0
- nose==1.3.7
- pytz==2025.2
- urllib3==1.26.20
prefix: /opt/conda/envs/minio-py
| [
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_object_is_iterator",
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_object_is_list",
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_object_is_tuple",
"tests/unit/sign_test.py::CanonicalRequestTest::test_request_with_query"
]
| []
| [
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_bucket_invalid_name",
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_object_is_non_string_iterable_1",
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_object_is_non_string_iterable_2",
"tests/unit/remove_objects_test.py::RemoveObjectsTest::test_object_is_non_string_iterable_3",
"tests/unit/sign_test.py::CanonicalRequestTest::test_simple_request",
"tests/unit/sign_test.py::StringToSignTest::test_signing_key",
"tests/unit/sign_test.py::SigningKeyTest::test_generate_signing_key",
"tests/unit/sign_test.py::AuthorizationHeaderTest::test_generate_authentication_header",
"tests/unit/sign_test.py::PresignURLTest::test_presigned_invalid_expires",
"tests/unit/sign_test.py::PresignURLTest::test_presigned_no_access_key"
]
| []
| Apache License 2.0 | 1,159 | [
"minio/signer.py",
"docs/API.md",
"minio/api.py",
"examples/listen_notification.py"
]
| [
"minio/signer.py",
"docs/API.md",
"minio/api.py",
"examples/listen_notification.py"
]
|
|
zalando-stups__senza-462 | d5aa551914e024e9bf0bde97455db3a4eab62896 | 2017-04-07 09:26:03 | e9f84724628b4761f8d5da4d37a2993f11d6433b | coveralls:
[](https://coveralls.io/builds/10978001)
Changes Unknown when pulling **81412ebca59282719b16c1387624a0d97cbc0348 on acm-createdat-key-error** into ** on master**.
hjacobs: :+1: | diff --git a/senza/manaus/acm.py b/senza/manaus/acm.py
index 8d85313..f43ca8c 100644
--- a/senza/manaus/acm.py
+++ b/senza/manaus/acm.py
@@ -81,7 +81,7 @@ class ACMCertificate:
subject_alternative_name = certificate['SubjectAlternativeNames']
domain_validation_options = certificate['DomainValidationOptions']
subject = certificate['Subject']
- created_at = certificate['CreatedAt']
+ created_at = certificate.get('CreatedAt')
status = certificate['Status']
signature_algorithm = certificate['SignatureAlgorithm']
in_use_by = certificate['InUseBy']
| resolve_ssl_certificates for ACM: KeyError: 'CreatedAt'
```
cat /tmp/senza-traceback-28v30ylh
Traceback (most recent call last):
File "/usr/lib/python3.5/site-packages/senza/error_handling.py", line 105, in __call__
self.function(*args, **kwargs)
File "/usr/lib/python3.5/site-packages/click/core.py", line 722, in __call__
return self.main(*args, **kwargs)
File "/usr/lib/python3.5/site-packages/click/core.py", line 697, in main
rv = self.invoke(ctx)
File "/usr/lib/python3.5/site-packages/click/core.py", line 1066, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/lib/python3.5/site-packages/click/core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/lib/python3.5/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "/usr/lib/python3.5/site-packages/senza/cli.py", line 580, in create
data = create_cf_template(definition, region, version, parameter, force, parameter_file)
File "/usr/lib/python3.5/site-packages/senza/cli.py", line 669, in create_cf_template
data = evaluate(definition.copy(), args, account_info, force)
File "/usr/lib/python3.5/site-packages/senza/cli.py", line 237, in evaluate
definition = componentfn(definition, configuration, args, info, force, account_info)
File "/usr/lib/python3.5/site-packages/senza/components/weighted_dns_elastic_load_balancer.py", line 49, in component_weighted_dns_elastic_load_balancer
account_info)
File "/usr/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 135, in component_elastic_load_balancer
listeners = resolve_ssl_certificates(listeners, subdomain, main_zone, account_info)
File "/usr/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 95, in resolve_ssl_certificates
ssl_cert = get_ssl_cert(subdomain, main_zone, listener, account_info)
File "/usr/lib/python3.5/site-packages/senza/components/elastic_load_balancer.py", line 52, in get_ssl_cert
reverse=True)
File "/usr/lib/python3.5/site-packages/senza/manaus/acm.py", line 178, in get_certificates
certificate = ACMCertificate.get_by_arn(self.region, arn)
File "/usr/lib/python3.5/site-packages/senza/manaus/acm.py", line 110, in get_by_arn
return cls.from_boto_dict(certificate)
File "/usr/lib/python3.5/site-packages/senza/manaus/acm.py", line 84, in from_boto_dict
created_at = certificate['CreatedAt']
KeyError: 'CreatedAt'
```
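The last frame is an unconditional dictionary lookup on a key that imported certificates never carry. A minimal sketch of the tolerant lookup — mirroring the one-line change in the patch above, with `cert` standing in for the dict under the `Certificate` key of the `describe_certificate` response (fields trimmed for illustration):

```python
cert = {'Type': 'IMPORTED', 'ImportedAt': '2017-04-07T10:12:55Z', 'Status': 'ISSUED'}

created_at = cert.get('CreatedAt')    # None instead of KeyError for imported certs
imported_at = cert.get('ImportedAt')  # imported certificates carry this field instead
print(created_at, imported_at)
```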
Imported certs don't have `CreatedAt`:
```
>>> c.describe_certificate(CertificateArn='arn:aws:acm:eu-central-1:123456789012:certificate/f14f9718-7da8-4250-9c21-d0341da4e44f')
{'Certificate': {'NotAfter': datetime.datetime(2017, 5, 7, 10, 0, tzinfo=tzlocal()), 'SubjectAlternativeNames': ['pierone.stups.zalan.do', 'registry.opensource.zalan.do'], 'Subject': 'C=DE,L=Berlin,O=Zalando SE,OU=CDP,CN=CDP Proxy', 'Serial': '2a:d2:f6:d3:23:62:0b:5f:f8:2f:d6:3a:4a:9f:b8:c6:48:a0:11:62', 'Type': 'IMPORTED', 'ImportedAt': datetime.datetime(2017, 4, 7, 10, 12, 55, tzinfo=tzlocal()), 'SignatureAlgorithm': 'SHA256WITHRSA', 'CertificateArn': 'arn:aws:acm:eu-central-1:123456789012:certificate/f14f9718-7da8-4250-9c21-d0341da4e44f', 'NotBefore': datetime.datetime(2017, 4, 7, 10, 0, tzinfo=tzlocal()), 'DomainValidationOptions': [{'DomainName': 'pierone.stups.zalan.do'}, {'DomainName': 'registry.opensource.zalan.do'}], 'Status': 'ISSUED', 'DomainName': 'pierone.stups.zalan.do', 'Issuer': 'Zalando SE', 'InUseBy': [], 'KeyAlgorithm': 'RSA-2048'}, 'ResponseMetadata': {'HTTPHeaders': {'content-type': 'application/x-amz-json-1.1', 'x-amzn-requestid': '514a08ad-1b71-11e7-91d6-c5bca103e8f9', 'date': 'Fri, 07 Apr 2017 09:05:16 GMT', 'content-length': '695'}, 'RequestId': '514a08ad-1b71-11e7-91d6-c5bca103e8f9', 'HTTPStatusCode': 200, 'RetryAttempts': 0}}
``` | zalando-stups/senza | diff --git a/tests/fixtures.py b/tests/fixtures.py
index 2d91e42..ae21b24 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -20,7 +20,7 @@ CERT1_ZO_NE = {'CertificateArn': 'arn:aws:acm:eu-west-1:cert1',
'IssuedAt': datetime(2016, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'Issuer': 'SenzaTest',
'KeyAlgorithm': 'RSA-2048',
- 'NotAfter': datetime(2017, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
+ 'NotAfter': datetime(2020, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'NotBefore': datetime(2016, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'Serial': '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00',
'SignatureAlgorithm': 'SHA256WITHRSA',
@@ -45,7 +45,7 @@ CERT1_ZO_NE_REVOKED = {'CertificateArn': 'arn:aws:acm:eu-west-1:cert1',
'IssuedAt': datetime(2016, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'Issuer': 'SenzaTest',
'KeyAlgorithm': 'RSA-2048',
- 'NotAfter': datetime(2017, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
+ 'NotAfter': datetime(2020, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'NotBefore': datetime(2016, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'Serial': '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00',
'SignatureAlgorithm': 'SHA256WITHRSA',
@@ -86,7 +86,7 @@ HOSTED_ZONE_ZO_NE_DEV = {'Config': {'PrivateZone': False},
SERVER_CERT_ZO_NE = MagicMock(name='zo-ne')
SERVER_CERT_ZO_NE.server_certificate_metadata = {'Arn': 'arn:aws:123',
'ServerCertificateName': 'zo-ne',
- 'Expiration': datetime(2017, 4, 1, 12, 14, 14,
+ 'Expiration': datetime(2020, 4, 1, 12, 14, 14,
tzinfo=timezone(timedelta(hours=2))),
'Path': '/',
'ServerCertificateId': '000',
diff --git a/tests/test_manaus/test_acm.py b/tests/test_manaus/test_acm.py
index f022ff2..a10013b 100644
--- a/tests/test_manaus/test_acm.py
+++ b/tests/test_manaus/test_acm.py
@@ -34,7 +34,7 @@ CERT1 = {'CertificateArn': 'arn:aws:acm:eu-west-1:cert1',
'IssuedAt': datetime(2016, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'Issuer': 'SenzaTest',
'KeyAlgorithm': 'RSA-2048',
- 'NotAfter': datetime(2017, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
+ 'NotAfter': datetime(2020, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'NotBefore': datetime(2016, 4, 1, 12, 14, 14, tzinfo=timezone.utc),
'Serial': '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00',
'SignatureAlgorithm': 'SHA256WITHRSA',
@@ -75,7 +75,7 @@ CERT2 = {'CertificateArn': 'arn:aws:acm:eu-west-1:cert2',
'IssuedAt': datetime(2016, 4, 1, 12, 14, 14),
'Issuer': 'SenzaTest',
'KeyAlgorithm': 'RSA-2048',
- 'NotAfter': datetime(2017, 4, 1, 12, 14, 14),
+ 'NotAfter': datetime(2020, 4, 1, 12, 14, 14),
'NotBefore': datetime(2016, 4, 1, 12, 14, 14),
'Serial': '00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00',
'SignatureAlgorithm': 'SHA256WITHRSA',
@@ -85,6 +85,23 @@ CERT2 = {'CertificateArn': 'arn:aws:acm:eu-west-1:cert2',
'*.senza.aws.example.net',
'*.app.example.net']}
+CERT3 = {
+ 'NotAfter': datetime(2017, 5, 7, 10, 0, tzinfo=timezone.utc),
+ 'SubjectAlternativeNames': ['pierone.stups.zalan.do', 'registry.opensource.zalan.do'],
+ 'Subject': 'C=DE,L=Berlin,O=Zalando SE,OU=CDP,CN=CDP Proxy',
+ 'Serial': '2a:d2:f6:d3:23:62:0b:5f:f8:2f:d6:3a:4a:9f:b8:c6:48:a0:11:62',
+ 'Type': 'IMPORTED',
+ 'ImportedAt': datetime(2017, 4, 7, 10, 12, 55, tzinfo=timezone.utc),
+ 'SignatureAlgorithm': 'SHA256WITHRSA',
+ 'CertificateArn': 'arn:aws:acm:eu-west-1:cert3',
+ 'NotBefore': datetime(2017, 4, 7, 10, 0, tzinfo=timezone.utc),
+ 'DomainValidationOptions': [{'DomainName': 'pierone.stups.zalan.do'}, {'DomainName': 'registry.opensource.zalan.do'}],
+ 'Status': 'ISSUED',
+ 'DomainName': 'pierone.stups.zalan.do',
+ 'Issuer': 'Zalando SE',
+ 'InUseBy': [],
+ 'KeyAlgorithm': 'RSA-2048'}
+
CERT_VALIDATION_TIMED_OUT = {
'KeyAlgorithm': 'RSA-2048',
'DomainName': 'alpha.example.org',
@@ -109,7 +126,7 @@ def test_certificate_valid():
assert certificate1.domain_name == '*.senza.example.com'
assert certificate1.is_valid(when=datetime(2016, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
- assert not certificate1.is_valid(when=datetime(2018, 4, 5, 12, 14, 14,
+ assert not certificate1.is_valid(when=datetime(2021, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
assert not certificate1.is_valid(when=datetime(2013, 4, 2, 10, 11, 12,
tzinfo=timezone.utc))
@@ -121,7 +138,7 @@ def test_certificate_valid():
assert certificate1_revoked.domain_name == '*.senza.example.com'
assert not certificate1_revoked.is_valid(when=datetime(2016, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
- assert not certificate1_revoked.is_valid(when=datetime(2018, 4, 5, 12, 14, 14,
+ assert not certificate1_revoked.is_valid(when=datetime(2021, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
assert not certificate1_revoked.is_valid(when=datetime(2013, 4, 2, 10, 11, 12,
tzinfo=timezone.utc))
@@ -153,7 +170,7 @@ def test_certificate_get_by_arn(monkeypatch):
assert certificate1.domain_name == '*.senza.example.com'
assert certificate1.is_valid(when=datetime(2016, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
- assert not certificate1.is_valid(when=datetime(2018, 4, 5, 12, 14, 14,
+ assert not certificate1.is_valid(when=datetime(2021, 4, 5, 12, 14, 14,
tzinfo=timezone.utc))
assert not certificate1.is_valid(when=datetime(2013, 4, 2, 10, 11, 12,
tzinfo=timezone.utc))
@@ -201,6 +218,12 @@ def test_get_certificates(monkeypatch):
assert len(certificates_net) == 1
assert certificates_net[0].arn == 'arn:aws:acm:eu-west-1:cert2'
+ m_client.describe_certificate.side_effect = [{'Certificate': CERT3}]
+ certificates_net = list(acm.get_certificates(valid_only=False,
+ domain_name="registry.opensource.zalan.do"))
+ assert len(certificates_net) == 1
+ assert certificates_net[0].arn == 'arn:aws:acm:eu-west-1:cert3'
+
def test_arn_is_acm_certificate():
assert ACMCertificate.arn_is_acm_certificate('arn:aws:acm:certificate')
| {
"commit_name": "merge_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 2.1 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"pytest-cov"
],
"pre_install": null,
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | arrow==1.2.3
attrs==22.2.0
boto3==1.23.10
botocore==1.26.10
certifi==2021.5.30
charset-normalizer==2.0.12
click==8.0.4
clickclick==20.10.2
coverage==6.2
dnspython==2.2.1
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
jmespath==0.10.0
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pystache==0.6.4
pytest==7.0.1
pytest-cov==4.0.0
python-dateutil==2.9.0.post0
PyYAML==6.0.1
raven==6.10.0
requests==2.27.1
s3transfer==0.5.2
six==1.17.0
stups-cli-support==1.1.22
stups-pierone==1.1.56
-e git+https://github.com/zalando-stups/senza.git@d5aa551914e024e9bf0bde97455db3a4eab62896#egg=stups_senza
stups-tokens==1.1.19
stups-zign==1.2
tomli==1.2.3
typing==3.7.4.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: senza
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- arrow==1.2.3
- attrs==22.2.0
- boto3==1.23.10
- botocore==1.26.10
- charset-normalizer==2.0.12
- click==8.0.4
- clickclick==20.10.2
- coverage==6.2
- dnspython==2.2.1
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- jmespath==0.10.0
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pystache==0.6.4
- pytest==7.0.1
- pytest-cov==4.0.0
- python-dateutil==2.9.0.post0
- pyyaml==6.0.1
- raven==6.10.0
- requests==2.27.1
- s3transfer==0.5.2
- six==1.17.0
- stups-cli-support==1.1.22
- stups-pierone==1.1.56
- stups-tokens==1.1.19
- stups-zign==1.2
- tomli==1.2.3
- typing==3.7.4.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/senza
| [
"tests/test_manaus/test_acm.py::test_get_certificates"
]
| []
| [
"tests/test_manaus/test_acm.py::test_certificate_valid",
"tests/test_manaus/test_acm.py::test_certificate_comparison",
"tests/test_manaus/test_acm.py::test_certificate_get_by_arn",
"tests/test_manaus/test_acm.py::test_certificate_matches",
"tests/test_manaus/test_acm.py::test_arn_is_acm_certificate"
]
| []
| Apache License 2.0 | 1,160 | [
"senza/manaus/acm.py"
]
| [
"senza/manaus/acm.py"
]
|
DinoTools__python-overpy-64 | d49ad8080ff9e9da3081a240380915b159172a87 | 2017-04-07 12:09:15 | cc0cf1993f129036e35a958a519c76bfad88891f | diff --git a/overpy/__init__.py b/overpy/__init__.py
index 13de583..377c28d 100644
--- a/overpy/__init__.py
+++ b/overpy/__init__.py
@@ -349,23 +349,39 @@ class Result(object):
return result
@classmethod
- def from_xml(cls, data, api=None, parser=XML_PARSER_SAX):
+ def from_xml(cls, data, api=None, parser=None):
"""
- Create a new instance and load data from xml object.
+ Create a new instance and load data from xml data or object.
+
+ .. note::
+ If parser is set to None, the functions tries to find the best parse.
+ By default the SAX parser is chosen if a string is provided as data.
+ The parser is set to DOM if an xml.etree.ElementTree.Element is provided as data value.
:param data: Root element
- :type data: xml.etree.ElementTree.Element
- :param api:
+ :type data: str | xml.etree.ElementTree.Element
+ :param api: The instance to query additional information if required.
:type api: Overpass
- :param parser: Specify the parser to use(DOM or SAX)
- :type parser: Integer
+ :param parser: Specify the parser to use(DOM or SAX)(Default: None = autodetect, defaults to SAX)
+ :type parser: Integer | None
:return: New instance of Result object
:rtype: Result
"""
+ if parser is None:
+ if isinstance(data, str):
+ parser = XML_PARSER_SAX
+ else:
+ parser = XML_PARSER_DOM
+
result = cls(api=api)
if parser == XML_PARSER_DOM:
import xml.etree.ElementTree as ET
- root = ET.fromstring(data)
+ if isinstance(data, str):
+ root = ET.fromstring(data)
+ elif isinstance(data, ET.Element):
+ root = data
+ else:
+ raise exception.OverPyException("Unable to detect data type.")
for elem_cls in [Node, Way, Relation, Area]:
for child in root:
| Result.from_xml data argument has a wrong type doc
Passing an Element as the data argument to Result.from_xml(), as the docstring suggests is possible, raises an exception.
```
>>> overpy.Result.from_xml(xml.etree.ElementTree.fromstring("<osm />"))
Traceback (most recent call last):
File "<input>", line 1, in <module>
File "/home/w/osm-borders/lib/python3.5/site-packages/overpy/__init__.py", line 310, in from_xml
source = StringIO(data)
TypeError: initial_value must be str or None, not xml.etree.ElementTree.Element
```
It looks like it expects a string right now:
```
>>> overpy.Result.from_xml("<osm />")
<overpy.Result object at 0x7faf632b2eb8>
```
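The patch above resolves the mismatch by accepting both forms; the dispatch it adds boils down to something like this (standard library only, simplified from the diff — the function name here is illustrative, and the real code raises an OverPyException rather than TypeError):

```python
import xml.etree.ElementTree as ET

def load_root(data):
    """Accept either raw XML text or an already-parsed Element."""
    if isinstance(data, str):
        return ET.fromstring(data)
    if isinstance(data, ET.Element):
        return data
    raise TypeError("expected str or xml.etree.ElementTree.Element")

load_root("<osm />")                     # works
load_root(ET.fromstring("<osm />"))      # now works too
```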
My python version is:
$ python --version
Python 3.5.2
DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=16.10
DISTRIB_CODENAME=yakkety
DISTRIB_DESCRIPTION="Ubuntu 16.10"
(My guess is that Ubuntu's Python 3.5.2 is far more similar to Python 3.6 than to 3.5.) | DinoTools/python-overpy | diff --git a/tests/test_xml.py b/tests/test_xml.py
index d81f003..e414ad4 100644
--- a/tests/test_xml.py
+++ b/tests/test_xml.py
@@ -171,6 +171,28 @@ class TestDataError(object):
overpy.Way.from_xml(node)
+class TestParser(BaseTestNodes):
+ def test_exception(self):
+ with pytest.raises(overpy.exception.OverPyException):
+ overpy.Result.from_xml(123)
+
+ def test_xml_element(self):
+ import xml.etree.ElementTree as ET
+ data = read_file("xml/node-01.xml")
+ root = ET.fromstring(data)
+ result = overpy.Result.from_xml(root)
+
+ assert isinstance(result, overpy.Result)
+ self._test_node01(result)
+
+ def test_xml_autodetect_parser(self):
+ data = read_file("xml/node-01.xml")
+ result = overpy.Result.from_xml(data)
+
+ assert isinstance(result, overpy.Result)
+ self._test_node01(result)
+
+
class TestRemark(object):
def test_remark_runtime_error(self):
api = overpy.Overpass()
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 2
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
-e git+https://github.com/DinoTools/python-overpy.git@d49ad8080ff9e9da3081a240380915b159172a87#egg=overpy
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: python-overpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/python-overpy
| [
"tests/test_xml.py::TestParser::test_exception",
"tests/test_xml.py::TestParser::test_xml_element"
]
| []
| [
"tests/test_xml.py::TestAreas::test_node01",
"tests/test_xml.py::TestNodes::test_node01",
"tests/test_xml.py::TestRelation::test_relation01",
"tests/test_xml.py::TestRelation::test_relation02",
"tests/test_xml.py::TestRelation::test_relation03",
"tests/test_xml.py::TestRelation::test_relation04",
"tests/test_xml.py::TestWay::test_way01",
"tests/test_xml.py::TestWay::test_way02",
"tests/test_xml.py::TestWay::test_way03",
"tests/test_xml.py::TestWay::test_way04",
"tests/test_xml.py::TestDataError::test_element_wrong_type",
"tests/test_xml.py::TestDataError::test_node_missing_data",
"tests/test_xml.py::TestDataError::test_relation_missing_data",
"tests/test_xml.py::TestDataError::test_way_missing_data",
"tests/test_xml.py::TestParser::test_xml_autodetect_parser",
"tests/test_xml.py::TestRemark::test_remark_runtime_error",
"tests/test_xml.py::TestRemark::test_remark_runtime_remark",
"tests/test_xml.py::TestRemark::test_remark_unknown"
]
| []
| MIT License | 1,161 | [
"overpy/__init__.py"
]
| [
"overpy/__init__.py"
]
|
|
DinoTools__python-overpy-65 | daa6a72748c3e682f4ffe65fe4b9bfbc788d10b1 | 2017-04-07 12:40:06 | cc0cf1993f129036e35a958a519c76bfad88891f | diff --git a/overpy/__init__.py b/overpy/__init__.py
index 377c28d..6aa887a 100644
--- a/overpy/__init__.py
+++ b/overpy/__init__.py
@@ -19,6 +19,16 @@ PY3 = sys.version_info[0] == 3
XML_PARSER_DOM = 1
XML_PARSER_SAX = 2
+# Try to convert some common attributes
+# http://wiki.openstreetmap.org/wiki/Elements#Common_attributes
+GLOBAL_ATTRIBUTE_MODIFIERS = {
+ "changeset": int,
+ "timestamp": lambda ts: datetime.strptime(ts, "%Y-%m-%dT%H:%M:%SZ"),
+ "uid": int,
+ "version": int,
+ "visible": lambda v: v.lower() == "true"
+}
+
if PY2:
from urllib2 import urlopen
from urllib2 import HTTPError
@@ -608,17 +618,10 @@ class Element(object):
"""
self._result = result
- # Try to convert some common attributes
- # http://wiki.openstreetmap.org/wiki/Elements#Common_attributes
- self._attribute_modifiers = {
- "changeset": int,
- "timestamp": lambda ts: datetime.strptime(ts, "%Y-%m-%dT%H:%M:%SZ"),
- "uid": int,
- "version": int,
- "visible": lambda v: v.lower() == "true"
- }
self.attributes = attributes
- for n, m in self._attribute_modifiers.items():
+ # ToDo: Add option to modify attribute modifiers
+ attribute_modifiers = dict(GLOBAL_ATTRIBUTE_MODIFIERS.items())
+ for n, m in attribute_modifiers.items():
if n in self.attributes:
self.attributes[n] = m(self.attributes[n])
self.id = None
| Can't pickle overpy.Result
<!--- Verify first that your issue/request has not already been reported -->
##### Issue type
- Bug Report
##### OverPy version
<!--- Paste verbatim output from “python -c 'import overpy; print(overpy.__version__)'” or the hash of the git commit between quotes below. -->
```
0.4
```
##### OS
<!---
Add information about the os you are using OverPy on.
Pick one below and delete the rest:
-->
- Windows 7 x64 HP
#### Python version
<!-- Pick at least one below and delete the rest -->
- Python 3.5 x64
##### Summary
Neither the pickle nor the _pickle Python module can dump the query result.
##### Steps to reproduce
```python
import overpy
import pickle
op = overpy.Overpass()
sample = op.query("""
way(50.746,7.154,50.748,7.157) ["highway"];
(._;>;);
out body;
""")
with open('sample.pcl', 'wb') as pcl:
pickle.dump(sample, pcl)
```
<!--- You can also add links to gist.github.com. -->
##### Expected results
pcl-file full of useful data =)
##### Actual results
Empty pcl-file and exception:
```
11 with open('sample.pcl', 'wb') as pcl:
---> 12 pickle.dump(sample, pcl)
AttributeError: Can't pickle local object 'Element.__init__.<locals>.<lambda>'
```
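The root cause is that `pickle` serializes functions by qualified name, so a lambda created inside `__init__` (a local object) cannot be pickled, while a module-level callable can — which is what the patch above achieves by hoisting the modifiers into `GLOBAL_ATTRIBUTE_MODIFIERS`. A standalone illustration, independent of overpy:

```python
import pickle

def to_int(value):                 # module-level: pickled by reference, fine
    return int(value)

class Fixed:
    def __init__(self):
        self.modifier = to_int

class Broken:
    def __init__(self):
        self.modifier = lambda v: int(v)   # local lambda: unpicklable

pickle.dumps(Fixed())              # works
try:
    pickle.dumps(Broken())
except (AttributeError, pickle.PicklingError) as exc:
    print(exc)                     # Can't pickle local object 'Broken.__init__.<locals>.<lambda>'
```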
| DinoTools/python-overpy | diff --git a/tests/test_result.py b/tests/test_result.py
index 784208b..d83c0d7 100644
--- a/tests/test_result.py
+++ b/tests/test_result.py
@@ -2,6 +2,7 @@ import pytest
import overpy
+from tests.base_class import BaseTestWay
from tests import read_file, new_server_thread, stop_server_thread, BaseHTTPRequestHandler
@@ -109,6 +110,23 @@ class TestNode(object):
stop_server_thread(server)
+class TestPickle(BaseTestWay):
+ def test_way02(self):
+ """
+ Try to pickle and unpickle the result object
+ """
+ import pickle
+
+ api = overpy.Overpass()
+ result = api.parse_json(read_file("json/way-02.json"))
+ self._test_way02(result)
+ # do pickle and unpickle
+ result_string = pickle.dumps(result)
+ new_result = pickle.loads(result_string)
+ # test new result
+ self._test_way02(new_result)
+
+
class TestRelation(object):
def test_missing_unresolvable(self):
url, server = new_server_thread(HandleResponseJSON02)
| {
"commit_name": "head_commit",
"failed_lite_validators": [],
"has_test_patch": true,
"is_lite": true,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 0,
"test_score": 0
},
"num_modified_files": 1
} | 0.4 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
certifi==2021.5.30
importlib-metadata @ file:///tmp/build/80754af9/importlib-metadata_1631916693255/work
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
-e git+https://github.com/DinoTools/python-overpy.git@daa6a72748c3e682f4ffe65fe4b9bfbc788d10b1#egg=overpy
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: python-overpy
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib-metadata=4.8.1=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
prefix: /opt/conda/envs/python-overpy
| [
"tests/test_result.py::TestPickle::test_way02"
]
| []
| [
"tests/test_result.py::TestResult::test_expand_error",
"tests/test_result.py::TestResult::test_expand_01",
"tests/test_result.py::TestArea::test_missing_unresolvable",
"tests/test_result.py::TestArea::test_missing_resolvable",
"tests/test_result.py::TestNode::test_missing_unresolvable",
"tests/test_result.py::TestNode::test_missing_resolvable",
"tests/test_result.py::TestRelation::test_missing_unresolvable",
"tests/test_result.py::TestRelation::test_missing_resolvable",
"tests/test_result.py::TestWay::test_missing_unresolvable",
"tests/test_result.py::TestWay::test_missing_resolvable"
]
| []
| MIT License | 1,162 | [
"overpy/__init__.py"
]
| [
"overpy/__init__.py"
]
|
|
conjure-up__conjure-up-808 | f0d89c58e62dee3e64420a5b7a321b92e8f92e4f | 2017-04-07 18:36:41 | e9fe156b6121631a3c20ce6f06ab94269da77100 | mbruzek: +1
johnsca: PR updated to fix test. | diff --git a/conjureup/controllers/newcloud/gui.py b/conjureup/controllers/newcloud/gui.py
index c143f03..7a8c603 100644
--- a/conjureup/controllers/newcloud/gui.py
+++ b/conjureup/controllers/newcloud/gui.py
@@ -177,21 +177,12 @@ class NewCloudController:
# information.
cloud_type = juju.get_cloud_types_by_name()[app.current_cloud]
- try:
- if cloud_type == 'lxd':
- lxd = common.is_lxd_ready()
- if not lxd['ready']:
- return controllers.use('lxdsetup').render(lxd['msg'])
- self.__do_bootstrap()
- return
- except LookupError as e:
- # TODO: Add vsphere once lp bug 1671650 is resolved
- if cloud_type in ['maas']:
- app.log.debug(
- "Not a cloud, using provider type: {}".format(
- app.current_cloud))
- else:
- raise Exception(e)
+ if cloud_type == 'localhost':
+ lxd = common.is_lxd_ready()
+ if not lxd['ready']:
+ return controllers.use('lxdsetup').render(lxd['msg'])
+ self.__do_bootstrap()
+ return
# XXX: always prompt for maas information for now as there is no way to
# logically store the maas server ip for future sessions.
diff --git a/conjureup/controllers/newcloud/tui.py b/conjureup/controllers/newcloud/tui.py
index dce8e7d..a4c4a99 100644
--- a/conjureup/controllers/newcloud/tui.py
+++ b/conjureup/controllers/newcloud/tui.py
@@ -43,8 +43,8 @@ class NewCloudController:
return controllers.use('deploy').render()
def render(self):
- cloud = juju.get_cloud(app.current_cloud)
- if cloud['type'] != 'lxd':
+ cloud_type = juju.get_cloud_types_by_name()[app.current_cloud]
+ if cloud_type != 'localhost':
if not common.try_get_creds(app.current_cloud):
utils.warning("You attempted to do an install against a cloud "
"that requires credentials that could not be "
@@ -54,7 +54,7 @@ class NewCloudController:
"{}`.".format(app.current_cloud))
sys.exit(1)
- if cloud['type'] == 'lxd':
+ if cloud_type == 'localhost':
lxd = common.is_lxd_ready()
if not lxd['ready']:
return controllers.use('lxdsetup').render(lxd['msg'])
| conjure-up error: Unable to find credentials for cloud looking for local-host
Please outline the hardware you're running conjure-up on, including number of cpus, amount of memory, amount of storage space available: i7 4-core CPU, 16GB of RAM, ~450GB free disk space.
What spell was selected?: kubernetes-core
What cloud provider was selected (this includes both MAAS and localhost)?: localhost
Please outline what commands were run to install and execute conjure-up: snap refresh --channel edge conjure-up
Please post **~/.cache/conjure-up/conjure-up.log**:
[conjure-up.log.zip](https://github.com/conjure-up/conjure-up/files/906201/conjure-up.log.zip)
Error occurred on 2017-04-07. I see no errors in the log for this day.
Provide the output of the following commands:
```
mbruzek@pandora:~$ which -a juju
/snap/bin/juju
mbruzek@pandora:~$ which -a conjure-up
/snap/bin/conjure-up
mbruzek@pandora:~$ which -a lxd
/usr/bin/lxd
mbruzek@pandora:~$ juju version
2.2-beta3-yakkety-amd64
mbruzek@pandora:~$ conjure-up --version
conjure-up 2.2-beta3
mbruzek@pandora:~$ lxd --version
2.12
mbruzek@pandora:~$ cat /etc/lsb-release
DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=16.10
DISTRIB_CODENAME=yakkety
DISTRIB_DESCRIPTION="Ubuntu 16.10"
```
Summary of problem:
I tried to conjure-up kubernetes core from the edge channel and got an error about credentials.
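The patch above points at the likely mismatch: the controllers compared the cloud type against `'lxd'`, while `juju.get_cloud_types_by_name()` reports `'localhost'` for the local cloud on this Juju version, so the LXD path was never taken and the code fell through to the credentials lookup. A rough sketch of the corrected check (assumes a configured conjure-up session; the `'localhost'` key is an assumption for this scenario):

```python
from conjureup import juju

cloud_type = juju.get_cloud_types_by_name()['localhost']
if cloud_type == 'localhost':  # the old code tested for 'lxd' here
    # continue with the LXD readiness check and bootstrap
    ...
```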

| conjure-up/conjure-up | diff --git a/test/test_controllers_newcloud_gui.py b/test/test_controllers_newcloud_gui.py
index 33491aa..322004c 100644
--- a/test/test_controllers_newcloud_gui.py
+++ b/test/test_controllers_newcloud_gui.py
@@ -47,7 +47,7 @@ class NewCloudGUIRenderTestCase(unittest.TestCase):
self.mock_common = self.common_patcher.start()
self.mock_juju = self.juju_patcher.start()
self.mock_juju.get_cloud_types_by_name.return_value = {'localhost':
- 'lxd'}
+ 'localhost'}
self.track_screen_patcher = patch(
'conjureup.controllers.newcloud.gui.track_screen')
self.mock_track_screen = self.track_screen_patcher.start()
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_media",
"has_many_modified_files"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 1,
"test_score": 3
},
"num_modified_files": 2
} | 2.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"tox",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.5",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
backcall==0.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
-e git+https://github.com/conjure-up/conjure-up.git@f0d89c58e62dee3e64420a5b7a321b92e8f92e4f#egg=conjure_up
decorator==5.1.1
distlib==0.3.9
filelock==3.4.1
idna==3.10
importlib-metadata==4.8.3
importlib-resources==5.4.0
iniconfig==1.1.1
ipython==7.16.3
ipython-genutils==0.2.0
jedi==0.17.2
Jinja2==3.0.3
juju-wait==2.8.4
MarkupSafe==2.0.1
nose==1.3.7
oauthlib==3.2.2
packaging==21.3
parso==0.7.1
pexpect==4.9.0
pickleshare==0.7.5
platformdirs==2.4.0
pluggy==1.0.0
prettytable==2.5.0
progressbar2==3.55.0
prompt-toolkit==3.0.36
ptyprocess==0.7.0
py==1.11.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
python-utils==3.5.2
PyYAML==6.0.1
requests==2.27.1
requests-oauthlib==2.0.0
six==1.17.0
termcolor==1.1.0
toml==0.10.2
tomli==1.2.3
tox==3.28.0
traitlets==4.3.3
typing_extensions==4.1.1
urllib3==1.26.20
urwid==2.1.2
virtualenv==20.17.1
wcwidth==0.2.13
ws4py==0.3.4
zipp==3.6.0
| name: conjure-up
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- backcall==0.2.0
- charset-normalizer==2.0.12
- decorator==5.1.1
- distlib==0.3.9
- filelock==3.4.1
- idna==3.10
- importlib-metadata==4.8.3
- importlib-resources==5.4.0
- iniconfig==1.1.1
- ipython==7.16.3
- ipython-genutils==0.2.0
- jedi==0.17.2
- jinja2==3.0.3
- juju-wait==2.8.4
- markupsafe==2.0.1
- nose==1.3.7
- oauthlib==3.2.2
- packaging==21.3
- parso==0.7.1
- pexpect==4.9.0
- pickleshare==0.7.5
- platformdirs==2.4.0
- pluggy==1.0.0
- prettytable==2.5.0
- progressbar2==3.55.0
- prompt-toolkit==3.0.36
- ptyprocess==0.7.0
- py==1.11.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- python-utils==3.5.2
- pyyaml==6.0.1
- requests==2.27.1
- requests-oauthlib==2.0.0
- six==1.17.0
- termcolor==1.1.0
- toml==0.10.2
- tomli==1.2.3
- tox==3.28.0
- traitlets==4.3.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- urwid==2.1.2
- virtualenv==20.17.1
- wcwidth==0.2.13
- ws4py==0.3.4
- zipp==3.6.0
prefix: /opt/conda/envs/conjure-up
| [
"test/test_controllers_newcloud_gui.py::NewCloudGUIRenderTestCase::test_render"
]
| []
| [
"test/test_controllers_newcloud_gui.py::NewCloudGUIRenderTestCase::test_lxd_version_to_low",
"test/test_controllers_newcloud_gui.py::NewCloudGUIFinishTestCase::test_finish"
]
| []
| MIT License | 1,163 | [
"conjureup/controllers/newcloud/gui.py",
"conjureup/controllers/newcloud/tui.py"
]
| [
"conjureup/controllers/newcloud/gui.py",
"conjureup/controllers/newcloud/tui.py"
]
|
pypa__setuptools_scm-163 | 10d75f60b0e7a949c6ad7f108a0cc1a044290fce | 2017-04-07 20:51:50 | 0373c11d2c8968a857ff06c94f101abebf825507 | diff --git a/.hgtags b/.hgtags
new file mode 100644
index 0000000..1af47c2
--- /dev/null
+++ b/.hgtags
@@ -0,0 +1,1 @@
+1092123ef78598eade56aa9c57e484f3670c8da8 v1.9.0
diff --git a/.travis.yml b/.travis.yml
index 205f2ea..b731edf 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,7 @@ python:
- '3.3'
- '3.4'
- '3.5'
-- '3.6'
+- '3.6-dev'
env:
- TOXENV=py-test
@@ -46,4 +46,4 @@ deploy:
- <<: *pypi
on:
tags: true
- distributions: "bdist_egg"
+ distributions: "bdist_egg"
\ No newline at end of file
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 6956fae..c71114b 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,7 +2,7 @@ v1.15.3
=======
* bring back correctly getting our version in the own sdist, finalizes #114
-
+* fix issue #150: strip local components of tags
v1.15.2
=======
@@ -15,9 +15,6 @@ v1.15.1
* fix issue #126: the local part of any tags is discarded
when guessing new versions
-* minor performance optimization by doing fewer git calls
- in the usual cases
-
v1.15.0
=======
diff --git a/README.rst b/README.rst
index 4262fdd..5d83ba2 100644
--- a/README.rst
+++ b/README.rst
@@ -114,18 +114,15 @@ and uses roughly the following logic to render the version:
:code:`no distance and clean`:
:code:`{tag}`
:code:`distance and clean`:
- :code:`{next_version}.dev{distance}+{scm letter}{revision hash}`
+ :code:`{next_version}.dev{distance}+n{revision hash}`
:code:`no distance and not clean`:
:code:`{tag}+dYYYMMMDD`
:code:`distance and not clean`:
- :code:`{next_version}.dev{distance}+{scm letter}{revision hash}.dYYYMMMDD`
+ :code:`{next_version}.dev{distance}+n{revision hash}.dYYYMMMDD`
The next version is calculated by adding ``1`` to the last numeric component
of the tag.
-For git projects, the version relies on `git describe <https://git-scm.com/docs/git-describe>`_,
-so you will see an additional ``g`` prepended to the ``{revision hash}``.
-
Semantic Versioning (SemVer)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/setup.cfg b/setup.cfg
index d0092d4..f5f917c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,10 +1,6 @@
[bdist_wheel]
universal=1
-[metadata]
-# ensure that the LICENSE file is included in the built wheels
-license_file = LICENSE
-
[devpi:upload]
formats=sdist,bdist_wheel
diff --git a/setuptools_scm/git.py b/setuptools_scm/git.py
index b60279c..e330e58 100644
--- a/setuptools_scm/git.py
+++ b/setuptools_scm/git.py
@@ -4,7 +4,7 @@ from os.path import abspath, normcase, realpath, isfile, join
import warnings
FILES_COMMAND = 'git ls-files'
-DEFAULT_DESCRIBE = 'git describe --dirty --tags --long --match *.*'
+DEFAULT_DESCRIBE = 'git describe --tags --long --match *.*'
def _normalized(path):
@@ -83,31 +83,21 @@ def parse(root, describe_command=DEFAULT_DESCRIBE, pre_parse=warn_on_shallow):
return
if pre_parse:
pre_parse(wd)
+ rev_node = wd.node()
+ dirty = wd.is_dirty()
+
+ if rev_node is None:
+ return meta('0.0', distance=0, dirty=dirty)
out, err, ret = do_ex(describe_command, root)
if ret:
- # If 'git describe' failed, try to get the information otherwise.
- rev_node = wd.node()
- dirty = wd.is_dirty()
-
- if rev_node is None:
- return meta('0.0', distance=0, dirty=dirty)
-
return meta(
'0.0',
distance=wd.count_all_nodes(),
- node='g' + rev_node,
+ node=rev_node,
dirty=dirty,
)
- # 'out' looks e.g. like 'v1.5.0-0-g4060507' or
- # 'v1.15.1rc1-37-g9bd1298-dirty'.
- if out.endswith('-dirty'):
- dirty = True
- out = out[:-6]
- else:
- dirty = False
-
tag, number, node = out.rsplit('-', 2)
number = int(number)
if number:
diff --git a/setuptools_scm/hg.py b/setuptools_scm/hg.py
index cda5fc3..9e126a2 100644
--- a/setuptools_scm/hg.py
+++ b/setuptools_scm/hg.py
@@ -7,7 +7,7 @@ FILES_COMMAND = 'hg locate -I .'
def _hg_tagdist_normalize_tagcommit(root, tag, dist, node):
dirty = node.endswith('+')
- node = 'h' + node.strip('+')
+ node = node.strip('+')
revset = ("(branch(.) and tag({tag!r})::. and file('re:^(?!\.hgtags).*$')"
" - tag({tag!r}))").format(tag=tag)
if tag != '0.0':
@@ -58,17 +58,14 @@ def parse(root):
def archival_to_version(data):
trace('data', data)
- node = data.get('node', '')[:12]
- if node:
- node = 'h' + node
if 'tag' in data:
return meta(data['tag'])
elif 'latesttag' in data:
return meta(data['latesttag'],
distance=data['latesttagdistance'],
- node=node)
+ node=data['node'][:12])
else:
- return meta('0.0', node=node)
+ return meta('0.0', node=data.get('node', '')[:12])
def parse_archival(root):
diff --git a/setuptools_scm/version.py b/setuptools_scm/version.py
index 042644c..d2caf3d 100644
--- a/setuptools_scm/version.py
+++ b/setuptools_scm/version.py
@@ -1,5 +1,6 @@
from __future__ import print_function
import datetime
+import warnings
import re
from .utils import trace
@@ -31,8 +32,12 @@ def callable_or_entrypoint(group, callable_or_name):
def tag_to_version(tag):
trace('tag', tag)
+ if '+' in tag:
+ warnings.warn("tag %r will be stripped of the local component" % tag)
+ tag = tag.split('+')[0]
# lstrip the v because of py2/py3 differences in setuptools
# also required for old versions of setuptools
+
version = tag.rsplit('-', 1)[-1].lstrip('v')
if parse_version is None:
return version
@@ -125,7 +130,7 @@ def get_local_node_and_date(version):
if version.exact or version.node is None:
return version.format_choice("", "+d{time:%Y%m%d}")
else:
- return version.format_choice("+{node}", "+{node}.d{time:%Y%m%d}")
+ return version.format_choice("+n{node}", "+n{node}.d{time:%Y%m%d}")
def get_local_dirty_tag(version):
| cant parse version None
With a repo on an oddly-tagged commit,
```
$ git describe --all
23.46.2post210+gbe48adfpost3+g0cc25f2
```
I get an odd error message for `setuptools_scm.get_version()`:
```
$ rwt -q setuptools_scm==1.15.0 -- -c "import setuptools_scm; setuptools_scm.get_version()"
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/var/folders/c6/v7hnmq453xb6p2dbz1gqc6rr0000gn/T/rwt-d1ylkuc8/setuptools_scm/__init__.py", line 117, in get_version
parsed_version = _do_parse(root, parse)
File "/var/folders/c6/v7hnmq453xb6p2dbz1gqc6rr0000gn/T/rwt-d1ylkuc8/setuptools_scm/__init__.py", line 82, in _do_parse
version = version_from_scm(root)
File "/var/folders/c6/v7hnmq453xb6p2dbz1gqc6rr0000gn/T/rwt-d1ylkuc8/setuptools_scm/__init__.py", line 31, in version_from_scm
return _version_from_entrypoint(root, 'setuptools_scm.parse_scm')
File "/var/folders/c6/v7hnmq453xb6p2dbz1gqc6rr0000gn/T/rwt-d1ylkuc8/setuptools_scm/__init__.py", line 37, in _version_from_entrypoint
return ep.load()(root)
File "/var/folders/c6/v7hnmq453xb6p2dbz1gqc6rr0000gn/T/rwt-d1ylkuc8/setuptools_scm/git.py", line 103, in parse
return meta(tag, node=node, dirty=dirty)
File "/var/folders/c6/v7hnmq453xb6p2dbz1gqc6rr0000gn/T/rwt-d1ylkuc8/setuptools_scm/version.py", line 86, in meta
assert tag is not None, 'cant parse version %s' % tag
AssertionError: cant parse version None
```
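The fix in `setuptools_scm/version.py` above handles this by stripping everything after the first `+` before parsing the tag. A minimal sketch of that behaviour applied to the tag from this report (the helper name is illustrative, not setuptools_scm API):

```python
def strip_local_component(tag):
    # mirrors tag.split('+')[0] in setuptools_scm.version.tag_to_version
    return tag.split('+')[0]

print(strip_local_component("23.46.2post210+gbe48adfpost3+g0cc25f2"))
# -> "23.46.2post210", which the version parser can handle, so meta() no
#    longer receives None and the assertion above is not triggered
```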
I would expect the tag it couldn't parse to be `23.46.2post210+gbe48adfpost3+g0cc25f2` and not `None`. | pypa/setuptools_scm | diff --git a/testing/test_git.py b/testing/test_git.py
index 9e951a9..5e60d42 100644
--- a/testing/test_git.py
+++ b/testing/test_git.py
@@ -19,21 +19,25 @@ def test_version_from_git(wd):
assert wd.version == '0.1.dev0'
wd.commit_testfile()
- assert wd.version.startswith('0.1.dev1+g')
+ assert wd.version.startswith('0.1.dev1+')
assert not wd.version.endswith('1-')
wd('git tag v0.1')
assert wd.version == '0.1'
wd.write('test.txt', 'test2')
- assert wd.version.startswith('0.2.dev0+g')
+ assert wd.version.startswith('0.2.dev0+')
wd.commit_testfile()
- assert wd.version.startswith('0.2.dev1+g')
+ assert wd.version.startswith('0.2.dev1+')
wd('git tag version-0.2')
assert wd.version.startswith('0.2')
+ wd.commit_testfile()
+ wd('git tag version-0.2.post210+gbe48adfpost3+g0cc25f2')
+ assert wd.version.startswith('0.2')
+
@pytest.mark.issue(108)
@pytest.mark.issue(109)
@@ -101,4 +105,4 @@ def test_parse_no_worktree(tmpdir):
def test_alphanumeric_tags_match(wd):
wd.commit_testfile()
wd('git tag newstyle-development-started')
- assert wd.version.startswith('0.1.dev1+g')
+ assert wd.version.startswith('0.1.dev1+')
diff --git a/testing/test_mercurial.py b/testing/test_mercurial.py
index 1fe6841..100094e 100644
--- a/testing/test_mercurial.py
+++ b/testing/test_mercurial.py
@@ -15,7 +15,7 @@ def wd(wd):
archival_mapping = {
'1.0': {'tag': '1.0'},
- '1.1.dev3+h000000000000': {
+ '1.1.dev3+n000000000000': {
'latesttag': '1.0',
'latesttagdistance': '3',
'node': '0'*20,
@@ -91,7 +91,7 @@ def test_version_from_archival(wd):
'latesttagdistance: 3\n'
)
- assert wd.version == '0.2.dev3+h000000000000'
+ assert wd.version == '0.2.dev3+n000000000000'
@pytest.mark.issue('#72')
diff --git a/testing/test_regressions.py b/testing/test_regressions.py
index 9d24d61..4a067b3 100644
--- a/testing/test_regressions.py
+++ b/testing/test_regressions.py
@@ -18,16 +18,16 @@ def test_pkginfo_noscmroot(tmpdir, monkeypatch):
'from setuptools import setup;'
'setup(use_scm_version={"root": ".."})')
- _, stderr, ret = do_ex((sys.executable, 'setup.py', '--version'), p)
+ _, stderr, ret = do_ex('python setup.py --version', p)
assert 'setuptools-scm was unable to detect version for' in stderr
assert ret == 1
p.join("PKG-INFO").write('Version: 1.0')
- res = do((sys.executable, 'setup.py', '--version'), p)
+ res = do('python setup.py --version', p)
assert res == '1.0'
do('git init', p.dirpath())
- res = do((sys.executable, 'setup.py', '--version'), p)
+ res = do('python setup.py --version', p)
assert res == '1.0'
@@ -64,7 +64,7 @@ setup(use_scm_version=vcfg)
''')
p.join("PKG-INFO").write('Version: 1.0')
- res = do((sys.executable, 'setup.py', '--version'), p)
+ res = do('python setup.py --version', p)
assert res == '1.0'
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 2,
"issue_text_score": 0,
"test_score": 3
},
"num_modified_files": 7
} | 1.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "pytest",
"pip_packages": [
"pytest",
"flake8",
"wheel",
"twine"
],
"pre_install": [
"python setup.py egg_info"
],
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs @ file:///opt/conda/conda-bld/attrs_1642510447205/work
bleach==4.1.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
colorama==0.4.5
cryptography==40.0.2
docutils==0.18.1
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig @ file:///home/linux1/recipes/ci/iniconfig_1610983019677/work
jeepney==0.7.1
keyring==23.4.1
mccabe==0.7.0
more-itertools @ file:///tmp/build/80754af9/more-itertools_1637733554872/work
packaging @ file:///tmp/build/80754af9/packaging_1637314298585/work
pkginfo==1.10.0
pluggy @ file:///tmp/build/80754af9/pluggy_1615976315926/work
py @ file:///opt/conda/conda-bld/py_1644396412707/work
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
Pygments==2.14.0
pyparsing @ file:///tmp/build/80754af9/pyparsing_1635766073266/work
pytest==6.2.4
readme-renderer==34.0
requests==2.27.1
requests-toolbelt==1.0.0
rfc3986==1.5.0
SecretStorage==3.3.3
-e git+https://github.com/pypa/setuptools_scm.git@10d75f60b0e7a949c6ad7f108a0cc1a044290fce#egg=setuptools_scm
six==1.17.0
toml @ file:///tmp/build/80754af9/toml_1616166611790/work
tqdm==4.64.1
twine==3.8.0
typing_extensions @ file:///opt/conda/conda-bld/typing_extensions_1647553014482/work
urllib3==1.26.20
webencodings==0.5.1
zipp @ file:///tmp/build/80754af9/zipp_1633618647012/work
| name: setuptools_scm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- attrs=21.4.0=pyhd3eb1b0_0
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- importlib_metadata=4.8.1=hd3eb1b0_0
- iniconfig=1.1.1=pyhd3eb1b0_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- more-itertools=8.12.0=pyhd3eb1b0_0
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- packaging=21.3=pyhd3eb1b0_0
- pip=21.2.2=py36h06a4308_0
- pluggy=0.13.1=py36h06a4308_0
- py=1.11.0=pyhd3eb1b0_0
- pyparsing=3.0.4=pyhd3eb1b0_0
- pytest=6.2.4=py36h06a4308_2
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- toml=0.10.2=pyhd3eb1b0_0
- typing_extensions=4.1.1=pyh06a4308_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zipp=3.6.0=pyhd3eb1b0_0
- zlib=1.2.13=h5eee18b_1
- pip:
- bleach==4.1.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- colorama==0.4.5
- cryptography==40.0.2
- docutils==0.18.1
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- jeepney==0.7.1
- keyring==23.4.1
- mccabe==0.7.0
- pkginfo==1.10.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- pygments==2.14.0
- readme-renderer==34.0
- requests==2.27.1
- requests-toolbelt==1.0.0
- rfc3986==1.5.0
- secretstorage==3.3.3
- six==1.17.0
- tqdm==4.64.1
- twine==3.8.0
- urllib3==1.26.20
- webencodings==0.5.1
prefix: /opt/conda/envs/setuptools_scm
| [
"testing/test_mercurial.py::test_archival_to_version[1.1.dev3+n000000000000-data2]",
"testing/test_mercurial.py::test_version_from_archival"
]
| [
"testing/test_mercurial.py::test_archival_to_version[1.2.2.dev0-data4]"
]
| [
"testing/test_git.py::test_version_from_git",
"testing/test_git.py::test_git_worktree",
"testing/test_git.py::test_git_dirty_notag",
"testing/test_git.py::test_git_parse_shallow_warns",
"testing/test_git.py::test_git_parse_shallow_fail",
"testing/test_git.py::test_git_shallow_autocorrect",
"testing/test_git.py::test_find_files_stop_at_root_git",
"testing/test_git.py::test_parse_no_worktree",
"testing/test_git.py::test_alphanumeric_tags_match",
"testing/test_mercurial.py::test_archival_to_version[0.0-data0]",
"testing/test_mercurial.py::test_archival_to_version[1.0-data1]",
"testing/test_mercurial.py::test_archival_to_version[1.2.2-data3]",
"testing/test_mercurial.py::test_find_files_stop_at_root_hg",
"testing/test_mercurial.py::test_version_from_hg_id",
"testing/test_mercurial.py::test_version_in_merge",
"testing/test_mercurial.py::test_parse_no_worktree",
"testing/test_regressions.py::test_pkginfo_noscmroot",
"testing/test_regressions.py::test_pip_egg_info",
"testing/test_regressions.py::test_use_scm_version_callable"
]
| []
| MIT License | 1,164 | [
"README.rst",
"setuptools_scm/version.py",
".hgtags",
"CHANGELOG.rst",
".travis.yml",
"setuptools_scm/hg.py",
"setup.cfg",
"setuptools_scm/git.py"
]
| [
"README.rst",
"setuptools_scm/version.py",
".hgtags",
"CHANGELOG.rst",
".travis.yml",
"setuptools_scm/hg.py",
"setup.cfg",
"setuptools_scm/git.py"
]
|
|
pypa__setuptools_scm-168 | 00f3fbe0dfd3ae396abdd4b33cd69cca7d4459c3 | 2017-04-08 16:14:53 | 0373c11d2c8968a857ff06c94f101abebf825507 | diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 50f0cde..054bfe5 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,3 +1,9 @@
+v1.15.5
+=======
+
+* fix #167 by correctly respecting preformatted version metadata
+ from PKG-INFO/EGG-INFO
+
v1.15.4
=======
diff --git a/setuptools_scm/hacks.py b/setuptools_scm/hacks.py
index 2d298be..add89a8 100644
--- a/setuptools_scm/hacks.py
+++ b/setuptools_scm/hacks.py
@@ -10,7 +10,7 @@ def parse_pkginfo(root):
data = data_from_mime(pkginfo)
version = data.get('Version')
if version != 'UNKNOWN':
- return meta(version)
+ return meta(version, preformatted=True)
def parse_pip_egg_info(root):
diff --git a/setuptools_scm/version.py b/setuptools_scm/version.py
index 15c4495..dce8c75 100644
--- a/setuptools_scm/version.py
+++ b/setuptools_scm/version.py
@@ -55,7 +55,10 @@ def tags_to_versions(tags):
class ScmVersion(object):
def __init__(self, tag_version,
distance=None, node=None, dirty=False,
+ preformatted=False,
**kw):
+ if kw:
+ trace("unknown args", kw)
self.tag = tag_version
if dirty and distance is None:
distance = 0
@@ -64,6 +67,7 @@ class ScmVersion(object):
self.time = datetime.datetime.now()
self.extra = kw
self.dirty = dirty
+ self.preformatted = preformatted
@property
def exact(self):
@@ -84,13 +88,19 @@ class ScmVersion(object):
return self.format_with(dirty_format if self.dirty else clean_format)
-def meta(tag, distance=None, dirty=False, node=None, **kw):
+def _parse_tag(tag, preformatted):
+ if preformatted:
+ return tag
if SetuptoolsVersion is None or not isinstance(tag, SetuptoolsVersion):
tag = tag_to_version(tag)
- trace('version', tag)
+ return tag
+
+def meta(tag, distance=None, dirty=False, node=None, preformatted=False, **kw):
+ tag = _parse_tag(tag, preformatted)
+ trace('version', tag)
assert tag is not None, 'cant parse version %s' % tag
- return ScmVersion(tag, distance, node, dirty, **kw)
+ return ScmVersion(tag, distance, node, dirty, preformatted, **kw)
def guess_next_version(tag_version, distance):
@@ -147,6 +157,8 @@ def postrelease_version(version):
def format_version(version, **config):
trace('scm version', version)
trace('config', config)
+ if version.preformatted:
+ return version.tag
version_scheme = callable_or_entrypoint(
'setuptools_scm.version_scheme', config['version_scheme'])
local_scheme = callable_or_entrypoint(
| setuptools_scm unable to compute distance?
I have a tarball that was built with a previous version of setuptools_scm. The other day it started failing because the version number it was producing was ``1.None.0``. Nothing in the code base had changed, so I believe it to be a setuptools_scm regression.
This tarball is private, but the ``setup.py`` looks like:
```python
#!/usr/bin/env python
import setuptools
setuptools.setup(
name="pypi-theme",
version="15.0",
packages=[
"pypi_theme",
],
include_package_data=True,
use_scm_version={
"local_scheme": "dirty-tag",
"version_scheme": lambda v: "1.{.distance}.0".format(v),
},
install_requires=[
"Pyramid",
],
setup_requires=["setuptools_scm"],
)
```
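As a hedged sketch of the symptom (the stand-in class below is purely illustrative, not setuptools_scm's real `ScmVersion`): when the object handed to the custom `version_scheme` has a `distance` of `None`, the lambda above renders exactly this broken string.

```python
class FakeScmVersion:
    # stand-in attribute only; the real object is built by setuptools_scm
    distance = None

print("1.{.distance}.0".format(FakeScmVersion()))  # prints "1.None.0"
```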
I'm guessing that ``v.distance`` is returning ``None`` now when it previously didn't, but I don't know why. | pypa/setuptools_scm | diff --git a/testing/conftest.py b/testing/conftest.py
index 49a9d14..29e129c 100644
--- a/testing/conftest.py
+++ b/testing/conftest.py
@@ -52,14 +52,18 @@ class Wd(object):
self(self.add_command)
self.commit(reason=reason)
- @property
- def version(self):
+ def get_version(self, **kw):
__tracebackhide__ = True
from setuptools_scm import get_version
- version = get_version(root=str(self.cwd))
+ version = get_version(root=str(self.cwd), **kw)
print(version)
return version
+ @property
+ def version(self):
+ __tracebackhide__ = True
+ return self.get_version()
+
@pytest.yield_fixture(autouse=True)
def debug_mode():
diff --git a/testing/test_basic_api.py b/testing/test_basic_api.py
index 5f9e1d6..4192f71 100644
--- a/testing/test_basic_api.py
+++ b/testing/test_basic_api.py
@@ -30,6 +30,9 @@ def test_version_from_pkginfo(wd):
wd.write('PKG-INFO', 'Version: 0.1')
assert wd.version == '0.1'
+ # replicate issue 167
+ assert wd.get_version(version_scheme="1.{0.distance}.0".format) == '0.1'
+
def assert_root(monkeypatch, expected_root):
"""
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 1,
"test_score": 0
},
"num_modified_files": 3
} | 1.15 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"flake8",
"mccabe",
"readme",
"wheel",
"twine",
"devpi-client"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
bleach==4.1.0
build==0.9.0
certifi==2021.5.30
cffi==1.15.1
charset-normalizer==2.0.12
check-manifest==0.48
colorama==0.4.5
cryptography==40.0.2
devpi-client==6.0.7
devpi-common==3.7.2
docutils==0.18.1
flake8==5.0.4
idna==3.10
importlib-metadata==4.2.0
importlib-resources==5.4.0
iniconfig==1.1.1
jeepney==0.7.1
keyring==23.4.1
lazy==1.6
mccabe==0.7.0
packaging==21.3
pep517==0.13.1
pkginfo==1.10.0
platformdirs==2.4.0
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pycparser==2.21
pyflakes==2.5.0
Pygments==2.14.0
pyparsing==3.1.4
pytest==7.0.1
readme==0.7.1
readme-renderer==34.0
requests==2.27.1
requests-toolbelt==1.0.0
rfc3986==1.5.0
SecretStorage==3.3.3
-e git+https://github.com/pypa/setuptools_scm.git@00f3fbe0dfd3ae396abdd4b33cd69cca7d4459c3#egg=setuptools_scm
six==1.17.0
tomli==1.2.3
tqdm==4.64.1
twine==3.8.0
typing_extensions==4.1.1
urllib3==1.26.20
webencodings==0.5.1
zipp==3.6.0
| name: setuptools_scm
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- bleach==4.1.0
- build==0.9.0
- cffi==1.15.1
- charset-normalizer==2.0.12
- check-manifest==0.48
- colorama==0.4.5
- cryptography==40.0.2
- devpi-client==6.0.7
- devpi-common==3.7.2
- docutils==0.18.1
- flake8==5.0.4
- idna==3.10
- importlib-metadata==4.2.0
- importlib-resources==5.4.0
- iniconfig==1.1.1
- jeepney==0.7.1
- keyring==23.4.1
- lazy==1.6
- mccabe==0.7.0
- packaging==21.3
- pep517==0.13.1
- pkginfo==1.10.0
- platformdirs==2.4.0
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pycparser==2.21
- pyflakes==2.5.0
- pygments==2.14.0
- pyparsing==3.1.4
- pytest==7.0.1
- readme==0.7.1
- readme-renderer==34.0
- requests==2.27.1
- requests-toolbelt==1.0.0
- rfc3986==1.5.0
- secretstorage==3.3.3
- six==1.17.0
- tomli==1.2.3
- tqdm==4.64.1
- twine==3.8.0
- typing-extensions==4.1.1
- urllib3==1.26.20
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/setuptools_scm
| [
"testing/test_basic_api.py::test_version_from_pkginfo"
]
| []
| [
"testing/test_basic_api.py::test_do[ls]",
"testing/test_basic_api.py::test_do[dir]",
"testing/test_basic_api.py::test_data_from_mime",
"testing/test_basic_api.py::test_root_parameter_creation",
"testing/test_basic_api.py::test_root_parameter_pass_by",
"testing/test_basic_api.py::test_pretended",
"testing/test_basic_api.py::test_root_relative_to",
"testing/test_basic_api.py::test_dump_version",
"testing/test_basic_api.py::test_parse_plain"
]
| []
| MIT License | 1,166 | [
"setuptools_scm/version.py",
"CHANGELOG.rst",
"setuptools_scm/hacks.py"
]
| [
"setuptools_scm/version.py",
"CHANGELOG.rst",
"setuptools_scm/hacks.py"
]
|
|
RDFLib__rdflib-730 | af230076e7796c368ec4c912404fe3baf44761e5 | 2017-04-09 17:25:59 | 39d07c4a5c9395f1322a269982e69b63cbf4db22 | diff --git a/rdflib/compare.py b/rdflib/compare.py
index 5e3f5994..97de047b 100644
--- a/rdflib/compare.py
+++ b/rdflib/compare.py
@@ -194,6 +194,10 @@ class Color:
self.hashfunc = hashfunc
self._hash_color = None
+ def __str__(self):
+ nodes, color = self.key()
+ return "Color %s (%s nodes)" % (color, nodes)
+
def key(self):
return (len(self.nodes), self.hash_color())
@@ -277,7 +281,7 @@ class _TripleCanonicalizer(object):
others = set()
self._neighbors = defaultdict(set)
for s, p, o in self.graph:
- nodes = set([s, o])
+ nodes = set([s, p, o])
b = set([x for x in nodes if isinstance(x, BNode)])
if len(b) > 0:
others |= nodes - b
@@ -286,6 +290,9 @@ class _TripleCanonicalizer(object):
self._neighbors[s].add(o)
if isinstance(o, BNode):
self._neighbors[o].add(s)
+ if isinstance(p, BNode):
+ self._neighbors[p].add(s)
+ self._neighbors[p].add(p)
if len(bnodes) > 0:
return [
Color(list(bnodes), self.hashfunc, hash_cache=self._hash_cache)
@@ -317,7 +324,7 @@ class _TripleCanonicalizer(object):
while len(sequence) > 0 and not self._discrete(coloring):
W = sequence.pop()
for c in coloring[:]:
- if len(c.nodes) > 1:
+ if len(c.nodes) > 1 or isinstance(c.nodes[0], BNode):
colors = sorted(c.distinguish(W, self.graph),
key=lambda x: x.key(),
reverse=True)
@@ -328,8 +335,17 @@ class _TripleCanonicalizer(object):
sequence = sequence[:si] + colors + sequence[si+1:]
except ValueError:
sequence = colors[1:] + sequence
-
- return coloring
+ combined_colors = []
+ combined_color_map = dict()
+ for color in coloring:
+ color_hash = color.hash_color()
+ # This is a hash collision, and be combined into a single color for individuation.
+ if color_hash in combined_color_map:
+ combined_color_map[color_hash].nodes.extend(color.nodes)
+ else:
+ combined_colors.append(color)
+ combined_color_map[color_hash] = color
+ return combined_colors
@_runtime("to_hash_runtime")
def to_hash(self, stats=None):
@@ -515,14 +531,14 @@ def isomorphic(graph1, graph2):
-def to_canonical_graph(g1):
+def to_canonical_graph(g1, stats=None):
"""Creates a canonical, read-only graph.
Creates a canonical, read-only graph where all bnode id:s are based on
deterministical SHA-256 checksums, correlated with the graph contents.
"""
graph = Graph()
- graph += _TripleCanonicalizer(g1).canonical_triples()
+ graph += _TripleCanonicalizer(g1).canonical_triples(stats=stats)
return ReadOnlyGraphAggregate([graph])
| RGDA1 graph canonicalization sometimes still collapses distinct BNodes
During the [evaluation of my graph pattern learner](https://github.com/RDFLib/graph-pattern-learner/blob/master/eval.py#L433) I'm currently trying to generate all possible (different) SPARQL BGPs of a given length (5 at the moment). With up to 11 variables, enumerating all of those graphs might be stretching it a bit, but I'm nearly there. However, to do this, I need the canonical representations of SPARQL BGPs. As discussed before (#483), I'm reducing SPARQL BGPs (and especially their variables) to RDF graphs with BNodes (see [here](https://github.com/RDFLib/graph-pattern-learner/blob/master/graph_pattern.py#L189) if interested), then running RGDA1 on them and mapping the canonical BNode labels back to the SPARQL variables.
Similarly to #494, I noticed that during this process I still sometimes lose nodes. A minimal test case is below (a PR with a test will follow):
```python
g = rdflib.Graph()
g += [
(rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
rdflib.term.BNode('v2')),
(rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
rdflib.term.BNode('v0')),
(rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
(rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
(rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
rdflib.term.BNode('v1')),
(rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
rdflib.term.BNode('v0')),
(rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
(rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
(rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
rdflib.term.BNode('v5')),
(rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
rdflib.term.BNode('v4')),
(rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
(rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
(rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
rdflib.term.URIRef(u'urn:gp_learner:fixed_var:source')),
(rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
rdflib.term.BNode('v0')),
(rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
(rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
(rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
rdflib.term.BNode('v1')),
(rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
rdflib.term.BNode('v3')),
(rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
(rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement'))]
cg = rdflib.compare.to_canonical_graph(g)
```
For `g` we will get the following "stats":
```python
print 'graph length: %d, nodes: %d' % (len(g), len(g.all_nodes()))
print 'triple_bnode degrees:'
for triple_bnode in g.subjects(rdflib.RDF['type'], rdflib.RDF['Statement']):
print len(list(g.triples([triple_bnode, None, None])))
print 'all node out-degrees:'
print sorted([len(list(g.triples([node, None, None]))) for node in g.all_nodes()])
print 'all node in-degrees:'
print sorted([len(list(g.triples([None, None, node]))) for node in g.all_nodes()])
```
> output:
> ```
> graph length: 20, nodes: 14
> triple_bnode degrees:
> 4
> 4
> 4
> 4
> 4
> all node out-degrees:
> [0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4]
> all node in-degrees:
> [0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 3, 5, 5]
> ```
For `cg` we'll get the following:
```python
print 'graph length: %d, nodes: %d' % (len(cg), len(cg.all_nodes()))
print 'triple_bnode degrees:'
for triple_bnode in cg.subjects(rdflib.RDF['type'], rdflib.RDF['Statement']):
print len(list(cg.triples([triple_bnode, None, None])))
print 'all node out-degrees:'
print sorted([len(list(cg.triples([node, None, None]))) for node in cg.all_nodes()])
print 'all node in-degrees:'
print sorted([len(list(cg.triples([None, None, node]))) for node in cg.all_nodes()])
```
> output:
> ```
> graph length: 20, nodes: 13
> triple_bnode degrees:
> 4
> 4
> 4
> 4
> 4
> all node out-degrees:
> [0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4]
> all node in-degrees:
> [0, 0, 0, 0, 0, 1, 1, 1, 1, 3, 3, 5, 5]
> ```
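A quick, hedged way to state the invariant being violated here (the helper name is mine, not rdflib API): canonicalization should only relabel blank nodes, never merge them, so node counts must match. With the graph above this check returns `False`, matching the 14-vs-13 counts shown.

```python
import rdflib.compare

def bnode_count_preserved(graph):
    # canonicalization should relabel BNodes, not collapse distinct ones
    canonical = rdflib.compare.to_canonical_graph(graph)
    return len(graph.all_nodes()) == len(canonical.all_nodes())
```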
@jimmccusker could you maybe have another look? | RDFLib/rdflib | diff --git a/test/test_canonicalization.py b/test/test_canonicalization.py
index b64059e7..515756a4 100644
--- a/test/test_canonicalization.py
+++ b/test/test_canonicalization.py
@@ -1,6 +1,9 @@
from rdflib import Graph, RDF, BNode, URIRef, Namespace, ConjunctiveGraph, Literal
from rdflib.compare import to_isomorphic, to_canonical_graph
+
+import rdflib
from rdflib.plugins.memory import IOMemory
+
from six import text_type
from io import StringIO
@@ -154,6 +157,130 @@ def negative_graph_match_test():
for inputs in testInputs:
yield fn, inputs[0], inputs[1], inputs[2]
+def test_issue725_collapsing_bnodes():
+ g = rdflib.Graph()
+ g += [
+ (rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
+ rdflib.term.BNode('v2')),
+ (rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
+ rdflib.term.BNode('v0')),
+ (rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
+ rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
+ (rdflib.term.BNode('N0a76d42406b84fe4b8029d0a7fa04244'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
+ (rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
+ rdflib.term.BNode('v1')),
+ (rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
+ rdflib.term.BNode('v0')),
+ (rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
+ rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
+ (rdflib.term.BNode('N2f62af5936b94a8eb4b1e4bfa8e11d95'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
+ (rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
+ rdflib.term.BNode('v5')),
+ (rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
+ rdflib.term.BNode('v4')),
+ (rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
+ rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
+ (rdflib.term.BNode('N5ae541f93e1d4e5880450b1bdceb6404'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
+ (rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
+ rdflib.term.URIRef(u'urn:gp_learner:fixed_var:source')),
+ (rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
+ rdflib.term.BNode('v0')),
+ (rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
+ rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
+ (rdflib.term.BNode('N86ac7ca781f546ae939b8963895f672e'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement')),
+ (rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#object'),
+ rdflib.term.BNode('v1')),
+ (rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate'),
+ rdflib.term.BNode('v3')),
+ (rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#subject'),
+ rdflib.term.URIRef(u'urn:gp_learner:fixed_var:target')),
+ (rdflib.term.BNode('Nac82b883ca3849b5ab6820b7ac15e490'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
+ rdflib.term.URIRef(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement'))
+ ]
+
+ turtle = '''
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix xml: <http://www.w3.org/XML/1998/namespace> .
+@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
+
+[] a rdf:Statement ;
+ rdf:object [ ] ;
+ rdf:predicate _:v0 ;
+ rdf:subject <urn:gp_learner:fixed_var:target> .
+
+[] a rdf:Statement ;
+ rdf:object _:v1 ;
+ rdf:predicate _:v0 ;
+ rdf:subject <urn:gp_learner:fixed_var:target> .
+
+[] a rdf:Statement ;
+ rdf:object [ ] ;
+ rdf:predicate [ ] ;
+ rdf:subject <urn:gp_learner:fixed_var:target> .
+
+[] a rdf:Statement ;
+ rdf:object <urn:gp_learner:fixed_var:source> ;
+ rdf:predicate _:v0 ;
+ rdf:subject <urn:gp_learner:fixed_var:target> .
+
+[] a rdf:Statement ;
+ rdf:object _:v1 ;
+ rdf:predicate [ ] ;
+ rdf:subject <urn:gp_learner:fixed_var:target> .'''
+
+ #g = Graph()
+ #g.parse(data=turtle, format='turtle')
+
+ stats = {}
+ cg = rdflib.compare.to_canonical_graph(g, stats=stats)
+
+ print ('graph g length: %d, nodes: %d' % (len(g), len(g.all_nodes())))
+ print ('triple_bnode degrees:')
+ for triple_bnode in g.subjects(rdflib.RDF['type'], rdflib.RDF['Statement']):
+ print (len(list(g.triples([triple_bnode, None, None]))))
+ print ('all node out-degrees:')
+ print (sorted([len(list(g.triples([node, None, None]))) for node in g.all_nodes()]))
+ print ('all node in-degrees:')
+ print (sorted([len(list(g.triples([None, None, node]))) for node in g.all_nodes()]))
+ print(g.serialize(format='n3'))
+
+ print ('graph cg length: %d, nodes: %d' % (len(cg), len(cg.all_nodes())))
+ print ('triple_bnode degrees:')
+ for triple_bnode in cg.subjects(rdflib.RDF['type'], rdflib.RDF['Statement']):
+ print (len(list(cg.triples([triple_bnode, None, None]))))
+ print ('all node out-degrees:')
+ print (sorted([len(list(cg.triples([node, None, None]))) for node in cg.all_nodes()]))
+ print ('all node in-degrees:')
+ print (sorted([len(list(cg.triples([None, None, node]))) for node in cg.all_nodes()]))
+ print(cg.serialize(format='n3'))
+
+ assert(len(g.all_nodes()) == len(cg.all_nodes()))
+
def test_issue494_collapsing_bnodes():
"""Test for https://github.com/RDFLib/rdflib/issues/494 collapsing BNodes"""
g = Graph()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_hyperlinks",
"has_issue_reference",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 2
},
"num_modified_files": 1
} | 4.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .[dev]",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"nose",
"pytest"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.6",
"reqs_path": [
"requirements.txt"
],
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
doctest-ignore-unicode==0.1.2
flake8==5.0.4
html5lib==1.1
importlib-metadata==4.2.0
iniconfig==1.1.1
isodate==0.6.1
mccabe==0.7.0
nose==1.3.7
packaging==21.3
pluggy==1.0.0
py==1.11.0
pycodestyle==2.9.1
pyflakes==2.5.0
pyparsing==3.1.4
pytest==7.0.1
-e git+https://github.com/RDFLib/rdflib.git@af230076e7796c368ec4c912404fe3baf44761e5#egg=rdflib
six==1.17.0
SPARQLWrapper==1.8.5
tomli==1.2.3
typing_extensions==4.1.1
webencodings==0.5.1
zipp==3.6.0
| name: rdflib
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- doctest-ignore-unicode==0.1.2
- flake8==5.0.4
- html5lib==1.1
- importlib-metadata==4.2.0
- iniconfig==1.1.1
- isodate==0.6.1
- mccabe==0.7.0
- nose==1.3.7
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pycodestyle==2.9.1
- pyflakes==2.5.0
- pyparsing==3.1.4
- pytest==7.0.1
- six==1.17.0
- sparqlwrapper==1.8.5
- tomli==1.2.3
- typing-extensions==4.1.1
- webencodings==0.5.1
- zipp==3.6.0
prefix: /opt/conda/envs/rdflib
| [
"test/test_canonicalization.py::test_issue725_collapsing_bnodes"
]
| []
| [
"test/test_canonicalization.py::test_issue494_collapsing_bnodes",
"test/test_canonicalization.py::test_issue682_signing_named_graphs"
]
| []
| BSD 3-Clause "New" or "Revised" License | 1,169 | [
"rdflib/compare.py"
]
| [
"rdflib/compare.py"
]
|
|
google__budou-16 | cb842ec0a8348245626af79ea755a51b658f7b73 | 2017-04-10 09:19:05 | 39fa2ad179a2af4e5001101fb19e5450df2a6105 | diff --git a/budou/budou.py b/budou/budou.py
index da1a28f..f547f61 100644
--- a/budou/budou.py
+++ b/budou/budou.py
@@ -26,6 +26,7 @@ from oauth2client.client import GoogleCredentials
import oauth2client.service_account
import re
import six
+import unicodedata
Chunk = collections.namedtuple('Chunk', ['word', 'pos', 'label', 'forward'])
"""Word chunk object.
@@ -158,12 +159,42 @@ class Budou(object):
A list of Chunks.
"""
chunks = self._get_source_chunks(input_text, language)
+ chunks = self._update_punct_direction(chunks)
for forward in (True, False):
condition = lambda chunk: (
chunk.label in TARGET_LABEL or chunk.pos == 'PUNCT')
chunks = self._concatenate_inner(chunks, condition, forward)
return chunks
+ def _update_punct_direction(self, chunks):
+ """Updates chunk's concatenate direction if it is a punctuation mark.
+
+ Args:
+ chunks: A list of Chunks.
+
+ Returns:
+ A list of updated Chunks.
+ """
+ result = []
+ for chunk in chunks:
+ if chunk.pos == 'PUNCT':
+ forward = False
+ try:
+ # Getting unicode category to determine the direction.
+ # Concatenates to the following if it belongs to Ps or Pi category.
+ # Ps: Punctuation, open (e.g. opening bracket characters)
+ # Pi: Punctuation, initial quote (e.g. opening quotation mark)
+ # Otherwise, concatenates to the previous word.
+ # See also https://en.wikipedia.org/wiki/Unicode_character_property
+ category = unicodedata.category(chunk.word)
+ if category in ('Ps', 'Pi'):
+ forward = True
+ except:
+ pass
+ chunk = Chunk(chunk.word, chunk.pos, chunk.label, forward)
+ result.append(chunk)
+ return result
+
def _get_attribute_dict(self, attributes, classname=None):
"""Returns a dictionary of attribute name-value pairs.
| Improper grouping of the comma (読点)
<img width="354" alt="default" src="https://cloud.githubusercontent.com/assets/968152/24842452/bcee80fa-1dd2-11e7-848a-86b683f9dab6.png">
In this case, the comma (読点) should be grouped with the preceding word (最終日).
<img width="357" alt="default" src="https://cloud.githubusercontent.com/assets/968152/24843094/48a93066-1dda-11e7-87f8-a684e52ebfc1.png">
<img width="389" alt="default" src="https://cloud.githubusercontent.com/assets/968152/24843125/a27b4160-1dda-11e7-9039-775f1542d099.png">
<img width="356" alt="default" src="https://cloud.githubusercontent.com/assets/968152/24843143/de3f7cf2-1dda-11e7-9ae7-320e84427d12.png">
<img width="364" alt="default" src="https://cloud.githubusercontent.com/assets/968152/24843153/f7d6d426-1dda-11e7-9c2a-82852ebf3d0d.png">
| google/budou | diff --git a/test/budou_test.py b/test/budou_test.py
index 8c3b30e..134487d 100644
--- a/test/budou_test.py
+++ b/test/budou_test.py
@@ -289,6 +289,36 @@ class TestBudouMethods(unittest.TestCase):
result, expected,
'Input text should be parsed into chunks separated by spaces.')
+ def test_update_punct_direction(self):
+ chunks = [
+ budou.Chunk(u'。', u'PUNCT', None, None),
+ budou.Chunk(u'、', u'PUNCT', None, None),
+ budou.Chunk(u'「', u'PUNCT', None, None),
+ budou.Chunk(u'」', u'PUNCT', None, None),
+ budou.Chunk(u'(', u'PUNCT', None, None),
+ budou.Chunk(u')', u'PUNCT', None, None),
+ budou.Chunk(u'[', u'PUNCT', None, None),
+ budou.Chunk(u']', u'PUNCT', None, None),
+ budou.Chunk(u'(', u'PUNCT', None, None),
+ budou.Chunk(u')', u'PUNCT', None, None),
+ ]
+ expected = [
+ budou.Chunk(u'。', u'PUNCT', None, False),
+ budou.Chunk(u'、', u'PUNCT', None, False),
+ budou.Chunk(u'「', u'PUNCT', None, True),
+ budou.Chunk(u'」', u'PUNCT', None, False),
+ budou.Chunk(u'(', u'PUNCT', None, True),
+ budou.Chunk(u')', u'PUNCT', None, False),
+ budou.Chunk(u'[', u'PUNCT', None, True),
+ budou.Chunk(u']', u'PUNCT', None, False),
+ budou.Chunk(u'(', u'PUNCT', None, True),
+ budou.Chunk(u')', u'PUNCT', None, False),
+ ]
+ result = self.parser._update_punct_direction(chunks)
+ self.assertEqual(
+ result, expected,
+ 'Punctuation mark\'s concatenating direction should be determined.')
+
if __name__ == '__main__':
unittest.main()
| {
"commit_name": "head_commit",
"failed_lite_validators": [
"has_short_problem_statement",
"has_hyperlinks",
"has_media"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 0,
"issue_text_score": 3,
"test_score": 2
},
"num_modified_files": 1
} | 0.2 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "google-api-python-client oauth2client lxml>=3.6.1 six",
"pip_packages": [
"pytest",
"mock"
],
"pre_install": [
"apt-get update",
"apt-get install -y gcc"
],
"python": "3.9",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | aiohappyeyeballs @ file:///croot/aiohappyeyeballs_1734469393482/work
aiohttp @ file:///croot/aiohttp_1734687138658/work
aiosignal @ file:///tmp/build/80754af9/aiosignal_1637843061372/work
async-timeout @ file:///croot/async-timeout_1732661977001/work
attrs @ file:///croot/attrs_1734533101012/work
Brotli @ file:///croot/brotli-split_1736182456865/work
-e git+https://github.com/google/budou.git@cb842ec0a8348245626af79ea755a51b658f7b73#egg=budou
cachetools @ file:///croot/cachetools_1738224668060/work
certifi @ file:///croot/certifi_1738623731865/work/certifi
cffi @ file:///croot/cffi_1736182485317/work
charset-normalizer @ file:///croot/charset-normalizer_1721748349566/work
cryptography @ file:///croot/cryptography_1740577825284/work
exceptiongroup==1.2.2
frozenlist @ file:///croot/frozenlist_1730902802621/work
google-api-core @ file:///croot/google-api-core-split_1742926780480/work
google-api-python-client @ file:///home/conda/feedstock_root/build_artifacts/google-api-python-client_1742419676831/work
google-auth @ file:///croot/google-auth_1741275169457/work
google-auth-httplib2 @ file:///home/conda/feedstock_root/build_artifacts/google-auth-httplib2_1733907410727/work
googleapis-common-protos @ file:///croot/googleapis-common-protos-feedstock_1742588908677/work
httplib2 @ file:///home/conda/feedstock_root/build_artifacts/httplib2_1733927481809/work
idna @ file:///croot/idna_1714398848350/work
iniconfig==2.1.0
lxml @ file:///croot/lxml_1737039601731/work
mock==5.2.0
multidict @ file:///croot/multidict_1730905498140/work
oauth2client @ file:///home/conda/feedstock_root/build_artifacts/oauth2client_1730205451224/work
packaging==24.2
pluggy==1.5.0
propcache @ file:///croot/propcache_1732303986938/work
proto-plus @ file:///croot/proto-plus_1742829342502/work
protobuf @ file:///croot/protobuf_1742419705443/work/bazel-bin/python/dist/protobuf-5.29.3-cp39-abi3-linux_x86_64.whl#sha256=99e7f032b8e72c85dbc84c9a250d19a4af2aeca12410c10349432035adb74ba0
pyasn1 @ file:///croot/pyasn1_1729239786406/work
pyasn1_modules @ file:///home/conda/feedstock_root/build_artifacts/pyasn1-modules_1733324602540/work
pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
pyOpenSSL @ file:///croot/pyopenssl_1741343803032/work
pyparsing @ file:///croot/pyparsing_1731445506121/work
PySocks @ file:///tmp/build/80754af9/pysocks_1605305812635/work
pytest==8.3.5
requests @ file:///croot/requests_1730999120400/work
rsa @ file:///tmp/build/80754af9/rsa_1614366226499/work
six @ file:///tmp/build/80754af9/six_1644875935023/work
tomli==2.2.1
typing_extensions @ file:///croot/typing_extensions_1734714854207/work
uritemplate @ file:///home/conda/feedstock_root/build_artifacts/uritemplate_1733927878495/work
urllib3 @ file:///croot/urllib3_1737133630106/work
yarl @ file:///croot/yarl_1732546845924/work
| name: budou
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- aiohappyeyeballs=2.4.4=py39h06a4308_0
- aiohttp=3.11.10=py39h5eee18b_0
- aiosignal=1.2.0=pyhd3eb1b0_0
- async-timeout=5.0.1=py39h06a4308_0
- attrs=24.3.0=py39h06a4308_0
- brotli-python=1.0.9=py39h6a678d5_9
- ca-certificates=2025.2.25=h06a4308_0
- cachetools=5.5.1=py39h06a4308_0
- certifi=2025.1.31=py39h06a4308_0
- cffi=1.17.1=py39h1fdaa30_1
- charset-normalizer=3.3.2=pyhd3eb1b0_0
- cryptography=44.0.1=py39h7825ff9_0
- frozenlist=1.5.0=py39h5eee18b_0
- google-api-core=2.24.2=py39h06a4308_0
- google-api-python-client=2.165.0=pyhff2d567_0
- google-auth=2.38.0=py39h06a4308_1
- google-auth-httplib2=0.2.0=pyhd8ed1ab_1
- googleapis-common-protos=1.69.2=py39h06a4308_0
- httplib2=0.22.0=pyhd8ed1ab_1
- icu=73.1=h6a678d5_0
- idna=3.7=py39h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libabseil=20250127.0=cxx17_h6a678d5_0
- libffi=3.4.4=h6a678d5_1
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libxml2=2.13.5=hfdd30dd_0
- libxslt=1.1.41=h097e994_0
- lxml=5.3.0=py39h57af460_1
- multidict=6.1.0=py39h5eee18b_0
- ncurses=6.4=h6a678d5_0
- oauth2client=4.1.3=pyhd8ed1ab_1
- openssl=3.0.16=h5eee18b_0
- pip=25.0=py39h06a4308_0
- propcache=0.2.0=py39h5eee18b_0
- proto-plus=1.26.1=py39h06a4308_0
- protobuf=5.29.3=py39he621ea3_0
- pyasn1=0.6.1=py39h06a4308_0
- pyasn1-modules=0.4.1=pyhd8ed1ab_1
- pycparser=2.21=pyhd3eb1b0_0
- pyopenssl=25.0.0=py39h06a4308_0
- pyparsing=3.2.0=py39h06a4308_0
- pysocks=1.7.1=py39h06a4308_0
- python=3.9.21=he870216_1
- readline=8.2=h5eee18b_0
- requests=2.32.3=py39h06a4308_1
- rsa=4.7.2=pyhd3eb1b0_1
- setuptools=75.8.0=py39h06a4308_0
- six=1.16.0=pyhd3eb1b0_1
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- typing-extensions=4.12.2=py39h06a4308_0
- typing_extensions=4.12.2=py39h06a4308_0
- tzdata=2025a=h04d1e81_0
- uritemplate=4.1.1=pyhd8ed1ab_1
- urllib3=2.3.0=py39h06a4308_0
- wheel=0.45.1=py39h06a4308_0
- xz=5.6.4=h5eee18b_1
- yarl=1.18.0=py39h5eee18b_0
- zlib=1.2.13=h5eee18b_1
- pip:
- exceptiongroup==1.2.2
- iniconfig==2.1.0
- mock==5.2.0
- packaging==24.2
- pluggy==1.5.0
- pytest==8.3.5
- tomli==2.2.1
prefix: /opt/conda/envs/budou
| [
"test/budou_test.py::TestBudouMethods::test_update_punct_direction"
]
| []
| [
"test/budou_test.py::TestBudouMethods::test_cache",
"test/budou_test.py::TestBudouMethods::test_get_attribute_dict",
"test/budou_test.py::TestBudouMethods::test_get_chunks_per_space",
"test/budou_test.py::TestBudouMethods::test_get_elements_list",
"test/budou_test.py::TestBudouMethods::test_get_source_chunks",
"test/budou_test.py::TestBudouMethods::test_migrate_html",
"test/budou_test.py::TestBudouMethods::test_parse_ja",
"test/budou_test.py::TestBudouMethods::test_parse_ko",
"test/budou_test.py::TestBudouMethods::test_preprocess",
"test/budou_test.py::TestBudouMethods::test_process_with_aria",
"test/budou_test.py::TestBudouMethods::test_spanize"
]
| []
| Apache License 2.0 | 1,170 | [
"budou/budou.py"
]
| [
"budou/budou.py"
]
|
|
tableau__server-client-python-166 | 312069666ba7afeea1093506ee3690d5bba23afb | 2017-04-10 17:37:27 | 312069666ba7afeea1093506ee3690d5bba23afb | diff --git a/samples/initialize_server.py b/samples/initialize_server.py
index 848159a..a3e312c 100644
--- a/samples/initialize_server.py
+++ b/samples/initialize_server.py
@@ -11,7 +11,6 @@ import logging
import tableauserverclient as TSC
-
def main():
parser = argparse.ArgumentParser(description='Initialize a server with content.')
parser.add_argument('--server', '-s', required=True, help='server address')
@@ -68,7 +67,8 @@ def main():
################################################################################
# Step 4: Create the project we need only if it doesn't exist
################################################################################
- import time; time.sleep(2) # sad panda...something about eventually consistent model
+ import time
+ time.sleep(2) # sad panda...something about eventually consistent model
all_projects = TSC.Pager(server_upload.projects)
project = next((p for p in all_projects if p.name.lower() == args.project.lower()), None)
diff --git a/tableauserverclient/filesys_helpers.py b/tableauserverclient/filesys_helpers.py
new file mode 100644
index 0000000..0cf304b
--- /dev/null
+++ b/tableauserverclient/filesys_helpers.py
@@ -0,0 +1,6 @@
+ALLOWED_SPECIAL = (' ', '.', '_', '-')
+
+
+def to_filename(string_to_sanitize):
+ sanitized = (c for c in string_to_sanitize if c.isalnum() or c in ALLOWED_SPECIAL)
+ return "".join(sanitized)
diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py
index 6ab275d..5491736 100644
--- a/tableauserverclient/server/endpoint/datasources_endpoint.py
+++ b/tableauserverclient/server/endpoint/datasources_endpoint.py
@@ -2,6 +2,7 @@ from .endpoint import Endpoint, api, parameter_added_in
from .exceptions import MissingRequiredFieldError
from .fileuploads_endpoint import Fileuploads
from .. import RequestFactory, DatasourceItem, PaginationItem, ConnectionItem
+from ...filesys_helpers import to_filename
import os
import logging
import copy
@@ -77,7 +78,7 @@ class Datasources(Endpoint):
with closing(self.get_request(url, parameters={'stream': True})) as server_response:
_, params = cgi.parse_header(server_response.headers['Content-Disposition'])
- filename = os.path.basename(params['filename'])
+ filename = to_filename(os.path.basename(params['filename']))
if filepath is None:
filepath = filename
elif os.path.isdir(filepath):
diff --git a/tableauserverclient/server/endpoint/groups_endpoint.py b/tableauserverclient/server/endpoint/groups_endpoint.py
index a1485a4..243aa54 100644
--- a/tableauserverclient/server/endpoint/groups_endpoint.py
+++ b/tableauserverclient/server/endpoint/groups_endpoint.py
@@ -58,10 +58,10 @@ class Groups(Endpoint):
def remove_user(self, group_item, user_id):
self._remove_user(group_item, user_id)
try:
- user_set = group_item.users
- for user in user_set:
+ users = group_item.users
+ for user in users:
if user.id == user_id:
- user_set.remove(user)
+ users.remove(user)
break
except UnpopulatedPropertyError:
# If we aren't populated, do nothing to the user list
@@ -73,9 +73,9 @@ class Groups(Endpoint):
def add_user(self, group_item, user_id):
new_user = self._add_user(group_item, user_id)
try:
- user_set = group_item.users
- user_set.add(new_user)
- group_item._set_users(user_set)
+ users = group_item.users
+ users.append(new_user)
+ group_item._set_users(users)
except UnpopulatedPropertyError:
# If we aren't populated, do nothing to the user list
pass
diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py
index 850df9f..4d72f69 100644
--- a/tableauserverclient/server/endpoint/workbooks_endpoint.py
+++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py
@@ -3,6 +3,7 @@ from .exceptions import MissingRequiredFieldError
from .fileuploads_endpoint import Fileuploads
from .. import RequestFactory, WorkbookItem, ConnectionItem, ViewItem, PaginationItem
from ...models.tag_item import TagItem
+from ...filesys_helpers import to_filename
import os
import logging
import copy
@@ -112,7 +113,7 @@ class Workbooks(Endpoint):
with closing(self.get_request(url, parameters={"stream": True})) as server_response:
_, params = cgi.parse_header(server_response.headers['Content-Disposition'])
- filename = os.path.basename(params['filename'])
+ filename = to_filename(os.path.basename(params['filename']))
if filepath is None:
filepath = filename
elif os.path.isdir(filepath):
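The fix in this patch centers on the new `to_filename` helper in `filesys_helpers.py`, which drops every character that is neither alphanumeric nor in a small whitelist before the server-supplied name is used as a local download path. A minimal standalone sketch of that approach is below; the helper body mirrors the patch, while the usage lines and expected outputs are illustrative examples drawn from the issue and the new tests, not part of the library itself.

# Keep only characters that are safe in file names on common platforms.
ALLOWED_SPECIAL = (' ', '.', '_', '-')


def to_filename(string_to_sanitize):
    """Drop characters that are neither alphanumeric nor explicitly allowed."""
    sanitized = (c for c in string_to_sanitize if c.isalnum() or c in ALLOWED_SPECIAL)
    return "".join(sanitized)


if __name__ == "__main__":
    # Names taken from the issue and the new tests: '?' and ',' are removed.
    print(to_filename("Option A or B?.twbx"))    # -> "Option A or B.twbx"
    print(to_filename("Name,With,Commas.twbx"))  # -> "NameWithCommas.twbx"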
| Downloads with invalid characters in the workbook name fail
When downloading workbooks with a name like `Option A or B?`, the download fails with `OSError(22, 'Invalid argument')` on Windows because question marks aren't allowed in file names. (Note: when downloading through the browser, such characters are converted to safe ones, e.g. `>` becomes `_`.)
Looking at the [code](https://github.com/tableau/server-client-python/blob/8e6c9078c0f5894e034393f7fcbc9f14f10c5838/tableauserverclient/server/endpoint/workbooks_endpoint.py#L91) I don't see an easy way to override the downloaded workbook's file name.
Two Requests:
1) adding an optional string parameter to set the downloaded workbook name.
2) adding an optional boolean parameter to download the workbook name the way the browser / server handles it.
(One of these options should take precedence over the other as they are mutually exclusive but both options would be useful). | tableau/server-client-python | diff --git a/test/test_datasource.py b/test/test_datasource.py
index ebf17cf..a2732db 100644
--- a/test/test_datasource.py
+++ b/test/test_datasource.py
@@ -145,6 +145,17 @@ class DatasourceTests(unittest.TestCase):
self.assertTrue(os.path.exists(file_path))
os.remove(file_path)
+ def test_download_sanitizes_name(self):
+ filename = "Name,With,Commas.tds"
+ disposition = 'name="tableau_workbook"; filename="{}"'.format(filename)
+ with requests_mock.mock() as m:
+ m.get(self.baseurl + '/1f951daf-4061-451a-9df1-69a8062664f2/content',
+ headers={'Content-Disposition': disposition})
+ file_path = self.server.datasources.download('1f951daf-4061-451a-9df1-69a8062664f2')
+ self.assertEqual(os.path.basename(file_path), "NameWithCommas.tds")
+ self.assertTrue(os.path.exists(file_path))
+ os.remove(file_path)
+
def test_download_extract_only(self):
# Pretend we're 2.5 for 'extract_only'
self.server.version = "2.5"
diff --git a/test/test_group.py b/test/test_group.py
index 2f7f227..20c4545 100644
--- a/test/test_group.py
+++ b/test/test_group.py
@@ -92,7 +92,7 @@ class GroupTests(unittest.TestCase):
m.post(self.baseurl + '/e7833b48-c6f7-47b5-a2a7-36e7dd232758/users', text=response_xml)
single_group = TSC.GroupItem('test')
single_group._id = 'e7833b48-c6f7-47b5-a2a7-36e7dd232758'
- single_group._users = set()
+ single_group._users = []
self.server.groups.add_user(single_group, '5de011f8-5aa9-4d5b-b991-f462c8dd6bb7')
self.assertEqual(1, len(single_group.users))
diff --git a/test/test_workbook.py b/test/test_workbook.py
index d276ece..0c5ecca 100644
--- a/test/test_workbook.py
+++ b/test/test_workbook.py
@@ -170,6 +170,17 @@ class WorkbookTests(unittest.TestCase):
self.assertTrue(os.path.exists(file_path))
os.remove(file_path)
+ def test_download_sanitizes_name(self):
+ filename = "Name,With,Commas.twbx"
+ disposition = 'name="tableau_workbook"; filename="{}"'.format(filename)
+ with requests_mock.mock() as m:
+ m.get(self.baseurl + '/1f951daf-4061-451a-9df1-69a8062664f2/content',
+ headers={'Content-Disposition': disposition})
+ file_path = self.server.workbooks.download('1f951daf-4061-451a-9df1-69a8062664f2')
+ self.assertEqual(os.path.basename(file_path), "NameWithCommas.twbx")
+ self.assertTrue(os.path.exists(file_path))
+ os.remove(file_path)
+
def test_download_extract_only(self):
# Pretend we're 2.5 for 'extract_only'
self.server.version = "2.5"
| {
"commit_name": "merge_commit",
"failed_lite_validators": [
"has_added_files",
"has_many_modified_files",
"has_many_hunks"
],
"has_test_patch": true,
"is_lite": false,
"llm_score": {
"difficulty_score": 1,
"issue_text_score": 2,
"test_score": 0
},
"num_modified_files": 4
} | 0.3 | {
"env_vars": null,
"env_yml_path": null,
"install": "pip install -e .",
"log_parser": "parse_log_pytest",
"no_use_env": null,
"packages": "requirements.txt",
"pip_packages": [
"pytest",
"requests-mock"
],
"pre_install": null,
"python": "3.6",
"reqs_path": null,
"test_cmd": "pytest --no-header -rA --tb=line --color=no -p no:cacheprovider -W ignore::DeprecationWarning"
} | attrs==22.2.0
certifi==2021.5.30
charset-normalizer==2.0.12
idna==3.10
importlib-metadata==4.8.3
iniconfig==1.1.1
packaging==21.3
pluggy==1.0.0
py==1.11.0
pyparsing==3.1.4
pytest==7.0.1
requests==2.11.1
requests-mock==1.12.1
-e git+https://github.com/tableau/server-client-python.git@312069666ba7afeea1093506ee3690d5bba23afb#egg=tableauserverclient
tomli==1.2.3
typing_extensions==4.1.1
urllib3==1.26.20
zipp==3.6.0
| name: server-client-python
channels:
- defaults
- https://repo.anaconda.com/pkgs/main
- https://repo.anaconda.com/pkgs/r
- conda-forge
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- ca-certificates=2025.2.25=h06a4308_0
- certifi=2021.5.30=py36h06a4308_0
- ld_impl_linux-64=2.40=h12ee557_0
- libffi=3.3=he6710b0_2
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- ncurses=6.4=h6a678d5_0
- openssl=1.1.1w=h7f8727e_0
- pip=21.2.2=py36h06a4308_0
- python=3.6.13=h12debd9_1
- readline=8.2=h5eee18b_0
- setuptools=58.0.4=py36h06a4308_0
- sqlite=3.45.3=h5eee18b_0
- tk=8.6.14=h39e8969_0
- wheel=0.37.1=pyhd3eb1b0_0
- xz=5.6.4=h5eee18b_1
- zlib=1.2.13=h5eee18b_1
- pip:
- attrs==22.2.0
- charset-normalizer==2.0.12
- idna==3.10
- importlib-metadata==4.8.3
- iniconfig==1.1.1
- packaging==21.3
- pluggy==1.0.0
- py==1.11.0
- pyparsing==3.1.4
- pytest==7.0.1
- requests==2.11.1
- requests-mock==1.12.1
- tomli==1.2.3
- typing-extensions==4.1.1
- urllib3==1.26.20
- zipp==3.6.0
prefix: /opt/conda/envs/server-client-python
| [
"test/test_datasource.py::DatasourceTests::test_download_sanitizes_name",
"test/test_group.py::GroupTests::test_add_user",
"test/test_workbook.py::WorkbookTests::test_download_sanitizes_name"
]
| []
| [
"test/test_datasource.py::DatasourceTests::test_delete",
"test/test_datasource.py::DatasourceTests::test_download",
"test/test_datasource.py::DatasourceTests::test_download_extract_only",
"test/test_datasource.py::DatasourceTests::test_get",
"test/test_datasource.py::DatasourceTests::test_get_before_signin",
"test/test_datasource.py::DatasourceTests::test_get_by_id",
"test/test_datasource.py::DatasourceTests::test_get_empty",
"test/test_datasource.py::DatasourceTests::test_publish",
"test/test_datasource.py::DatasourceTests::test_publish_invalid_file_type",
"test/test_datasource.py::DatasourceTests::test_publish_missing_mode",
"test/test_datasource.py::DatasourceTests::test_publish_missing_path",
"test/test_datasource.py::DatasourceTests::test_update",
"test/test_datasource.py::DatasourceTests::test_update_copy_fields",
"test/test_datasource.py::DatasourceTests::test_update_missing_id",
"test/test_group.py::GroupTests::test_add_user_before_populating",
"test/test_group.py::GroupTests::test_add_user_missing_group_id",
"test/test_group.py::GroupTests::test_add_user_missing_user_id",
"test/test_group.py::GroupTests::test_create_group",
"test/test_group.py::GroupTests::test_delete",
"test/test_group.py::GroupTests::test_get",
"test/test_group.py::GroupTests::test_get_before_signin",
"test/test_group.py::GroupTests::test_populate_users",
"test/test_group.py::GroupTests::test_remove_user",
"test/test_group.py::GroupTests::test_remove_user_before_populating",
"test/test_group.py::GroupTests::test_remove_user_missing_group_id",
"test/test_group.py::GroupTests::test_remove_user_missing_user_id",
"test/test_workbook.py::WorkbookTests::test_delete",
"test/test_workbook.py::WorkbookTests::test_delete_missing_id",
"test/test_workbook.py::WorkbookTests::test_download",
"test/test_workbook.py::WorkbookTests::test_download_extract_only",
"test/test_workbook.py::WorkbookTests::test_download_missing_id",
"test/test_workbook.py::WorkbookTests::test_get",
"test/test_workbook.py::WorkbookTests::test_get_before_signin",
"test/test_workbook.py::WorkbookTests::test_get_by_id",
"test/test_workbook.py::WorkbookTests::test_get_by_id_missing_id",
"test/test_workbook.py::WorkbookTests::test_get_empty",
"test/test_workbook.py::WorkbookTests::test_populate_connections",
"test/test_workbook.py::WorkbookTests::test_populate_connections_missing_id",
"test/test_workbook.py::WorkbookTests::test_populate_preview_image",
"test/test_workbook.py::WorkbookTests::test_populate_preview_image_missing_id",
"test/test_workbook.py::WorkbookTests::test_populate_views",
"test/test_workbook.py::WorkbookTests::test_populate_views_missing_id",
"test/test_workbook.py::WorkbookTests::test_publish",
"test/test_workbook.py::WorkbookTests::test_publish_invalid_file",
"test/test_workbook.py::WorkbookTests::test_publish_invalid_file_type",
"test/test_workbook.py::WorkbookTests::test_update",
"test/test_workbook.py::WorkbookTests::test_update_copy_fields",
"test/test_workbook.py::WorkbookTests::test_update_missing_id",
"test/test_workbook.py::WorkbookTests::test_update_tags"
]
| []
| MIT License | 1,171 | [
"tableauserverclient/server/endpoint/groups_endpoint.py",
"tableauserverclient/filesys_helpers.py",
"samples/initialize_server.py",
"tableauserverclient/server/endpoint/datasources_endpoint.py",
"tableauserverclient/server/endpoint/workbooks_endpoint.py"
]
| [
"tableauserverclient/server/endpoint/groups_endpoint.py",
"tableauserverclient/filesys_helpers.py",
"samples/initialize_server.py",
"tableauserverclient/server/endpoint/datasources_endpoint.py",
"tableauserverclient/server/endpoint/workbooks_endpoint.py"
]
|